Report generated on 05-Jun-2021 at 02:28:24 by pytest-html v3.1.1
389-ds-base | 2.0.5-20210605gitf53b28448.fc34 |
Packages | {"pluggy": "0.13.1", "py": "1.10.0", "pytest": "5.4.3"} |
Platform | Linux-5.11.15-300.fc34.x86_64-x86_64-with-glibc2.33 |
Plugins | {"flaky": "3.7.0", "html": "3.1.1", "libfaketime": "0.1.2", "metadata": "1.11.0"} |
Python | 3.9.5 |
cyrus-sasl | 2.1.27-8.fc34 |
nspr | 4.30.0-2.fc34 |
nss | 3.65.0-1.fc34 |
openldap | 2.4.57-3.fc34 |
2100 tests ran in 16106.31 seconds.
1999 passed, 15 skipped, 72 failed, 10 errors, 21 expected failures, 8 unexpected passes

Result | Test | Duration | Links |
---|---|---|---|
Error | suites/basic/basic_test.py::collect | ||
ImportError while importing test module '/export/tests/suites/basic/basic_test.py'. Hint: make sure your test modules/packages have valid Python names. Traceback: suites/basic/basic_test.py:25: in <module> from ....conftest import get_rpm_version E ImportError: attempted relative import beyond top-level package | |||
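The collect error means pytest imported basic_test.py with too few parent packages for a four-dot relative import (conftest.py lives two directories above the module, at /export/tests). A defensive sketch is below; the sys.path fallback is an assumption for illustration, not the suite's actual fix:

```python
# Sketch only: the failing import line is from the traceback; the
# fallback that loads conftest.py off sys.path is hypothetical.
import os
import sys

try:
    from ....conftest import get_rpm_version  # raises ImportError when collected shallowly
except ImportError:
    tests_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
    if tests_root not in sys.path:
        sys.path.insert(0, tests_root)
    from conftest import get_rpm_version
```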
Error | suites/gssapi/simple_gssapi_test.py::test_gssapi_bind::setup | 8.20 | |
request = <SubRequest 'topology_st_gssapi' for <Function test_gssapi_bind>> @pytest.fixture(scope="module") def topology_st_gssapi(request): """Create a DS standalone instance with GSSAPI enabled. This will alter the instance to remove the secure port, to allow GSSAPI to function. """ hostname = socket.gethostname().split('.', 1) # Assert we have a domain setup in some kind. assert len(hostname) == 2 REALM = hostname[1].upper() topology = create_topology({ReplicaRole.STANDALONE: 1}) # Fix the hostname. topology.standalone.host = socket.gethostname() krb = MitKrb5(realm=REALM, debug=DEBUGGING) # Destroy existing realm. > if krb.check_realm(): /usr/local/lib/python3.9/site-packages/lib389/topologies.py:283: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/mit_krb5.py:76: in check_realm p = Popen([self.kadmin, '-r', self.realm, '-q', 'list_principals'], /usr/lib64/python3.9/subprocess.py:951: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <Popen: returncode: 255 args: ['/usr/sbin/kadmin.local', '-r', 'HOSTED.UPSHI...> args = ['/usr/sbin/kadmin.local', '-r', 'HOSTED.UPSHIFT.RDU2.REDHAT.COM', '-q', 'list_principals'] executable = b'/usr/sbin/kadmin.local', preexec_fn = None, close_fds = True pass_fds = (), cwd = None, env = {}, startupinfo = None, creationflags = 0 shell = False, p2cread = -1, p2cwrite = -1, c2pread = 58, c2pwrite = 61 errread = 64, errwrite = 66, restore_signals = True, gid = None, gids = None uid = None, umask = -1, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session and gid is None and gids is None and uid is None and umask < 0): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. 
low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, gid, gids, uid, umask, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/kadmin.local' /usr/lib64/python3.9/subprocess.py:1821: FileNotFoundError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
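This error repeats for every test in the GSSAPI module because the fixture is module-scoped and unconditionally spawns /usr/sbin/kadmin.local. A minimal sketch of a guard that skips the suite when the MIT krb5 admin tool is absent; the guard is illustrative, not lib389's actual code:

```python
# Hypothetical guard: bail out cleanly when the MIT krb5 admin tool
# that MitKrb5.check_realm() shells out to is missing, instead of
# letting Popen raise FileNotFoundError in fixture setup.
import shutil

import pytest


@pytest.fixture(scope="module")
def topology_st_gssapi(request):
    if shutil.which("kadmin.local", path="/usr/sbin:/usr/bin:/sbin") is None:
        pytest.skip("kadmin.local not installed; GSSAPI suite needs krb5-server")
    # ... continue with realm and topology creation as in lib389.topologies ...
```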
Error | suites/gssapi/simple_gssapi_test.py::test_invalid_sasl_map::setup | 0.00 | |
Same module-scoped fixture failure as test_gssapi_bind::setup above: topology_st_gssapi spawns /usr/sbin/kadmin.local (lib389/mit_krb5.py:76) and fails with FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/kadmin.local' (subprocess.py:1821). | |||
Error | suites/gssapi/simple_gssapi_test.py::test_missing_user::setup | 0.00 | |
Same module-scoped fixture failure as test_gssapi_bind::setup above: topology_st_gssapi spawns /usr/sbin/kadmin.local (lib389/mit_krb5.py:76) and fails with FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/kadmin.local' (subprocess.py:1821). | |||
Error | suites/gssapi/simple_gssapi_test.py::test_support_mech::setup | 0.00 | |
Same module-scoped fixture failure as test_gssapi_bind::setup above: topology_st_gssapi spawns /usr/sbin/kadmin.local (lib389/mit_krb5.py:76) and fails with FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/kadmin.local' (subprocess.py:1821). | |||
Error | suites/gssapi/simple_gssapi_test.py::test_rejected_mech::setup | 0.00 | |
Same module-scoped fixture failure as test_gssapi_bind::setup above: topology_st_gssapi spawns /usr/sbin/kadmin.local (lib389/mit_krb5.py:76) and fails with FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/kadmin.local' (subprocess.py:1821). | |||
Error | tickets/ticket48973_test.py::test_ticket48973_init::setup | 0.72 | |
request = <SubRequest 'topology' for <Function test_ticket48973_init>> @pytest.fixture(scope="module") def topology(request): # Creating standalone instance ... standalone = DirSrv(verbose=False) args_instance[SER_HOST] = HOST_STANDALONE args_instance[SER_PORT] = PORT_STANDALONE args_instance[SER_SERVERID_PROP] = SERVERID_STANDALONE args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_standalone = args_instance.copy() standalone.allocate(args_standalone) instance_standalone = standalone.exists() if instance_standalone: standalone.delete() > standalone.create() /export/tests/tickets/ticket48973_test.py:52: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/__init__.py:831: in create self._createDirsrv(version) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:801: in _createDirsrv sds.create_from_args(general, slapd, backends, None) /usr/local/lib/python3.9/site-packages/lib389/instance/setup.py:663: in create_from_args self._prepare_ds(general, slapd, backends) /usr/local/lib/python3.9/site-packages/lib389/instance/setup.py:594: in _prepare_ds assert_c(slapd['root_dn'] is not None, "Configuration root_dn in section [slapd] not found") _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ condition = False, msg = 'Configuration root_dn in section [slapd] not found' def assert_c(condition, msg="Assertion Failed"): """This is the same as assert, but assert is compiled out when optimisation is enabled. This prevents compiling out. """ if not condition: > raise AssertionError(msg) E AssertionError: Configuration root_dn in section [slapd] not found /usr/local/lib/python3.9/site-packages/lib389/utils.py:1246: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... | |||
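This error repeats for every ticket48973 test because the module-scoped topology fixture builds args_instance without a root DN, so _prepare_ds sees slapd['root_dn'] as None. A sketch of one plausible repair, assuming lib389's SER_ROOT_DN/SER_ROOT_PW constants and DN_DM/PW_DM defaults are appropriate here; that this is the intended fix for ticket48973_test.py is an assumption:

```python
# Sketch: populate the root DN/password in the test module's legacy
# args_instance dict before standalone.allocate()/create(). Constant
# names are lib389's; applying them here is the assumed fix.
from lib389._constants import DN_DM, PW_DM, SER_ROOT_DN, SER_ROOT_PW

args_instance[SER_ROOT_DN] = DN_DM   # "cn=Directory Manager"
args_instance[SER_ROOT_PW] = PW_DM
```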
Error | tickets/ticket48973_test.py::test_ticket48973_ces_not_indexed::setup | 0.00 | |
Same module-scoped fixture failure as test_ticket48973_init::setup above: standalone.create() fails at lib389/instance/setup.py:594 with AssertionError: Configuration root_dn in section [slapd] not found (lib389/utils.py:1246). | |||
Error | tickets/ticket48973_test.py::test_ticket48973_homeDirectory_indexing::setup | 0.00 | |
Same module-scoped fixture failure as test_ticket48973_init::setup above: standalone.create() fails at lib389/instance/setup.py:594 with AssertionError: Configuration root_dn in section [slapd] not found (lib389/utils.py:1246). | |||
Error | tickets/ticket48973_test.py::test_ticket48973_homeDirectory_caseExactIA5Match_caseIgnoreIA5Match_indexing::setup | 0.00 | |
Same module-scoped fixture failure as test_ticket48973_init::setup above: standalone.create() fails at lib389/instance/setup.py:594 with AssertionError: Configuration root_dn in section [slapd] not found (lib389/utils.py:1246). | |||
Failed | suites/acl/keywords_part2_test.py::test_access_from_certain_network_only_ip | 13.24 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99e2d8160> add_user = None, aci_of_user = None def test_access_from_certain_network_only_ip(topo, add_user, aci_of_user): """ User can access the data when connecting from certain network only as per the ACI. :id: 4ec38296-7ac5-11e8-9816-8c16451d917b :customerscenario: True :setup: Standalone Server :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ # Turn access log buffering off to make less time consuming topo.standalone.config.set('nsslapd-accesslog-logbuffering', 'off') # Find the ip from ds logs , as we need to know the exact ip used by ds to run the instances. # Wait till Access Log is generated topo.standalone.restart() hostname = socket.gethostname() IP = socket.gethostbyname(hostname) # Add ACI domain = Domain(topo.standalone, DEFAULT_SUFFIX) domain.add("aci", f'(target = "ldap:///{IP_OU_KEY}")(targetattr=\"*\")(version 3.0; aci "IP aci"; ' f'allow(all)userdn = "ldap:///{NETSCAPEIP_KEY}" and (ip = "127.0.0.1" or ip = "::1" or ip = "{IP}") ;)') # create a new connection for the test conn = UserAccount(topo.standalone, NETSCAPEIP_KEY).bind(PW_DM) # Perform Operation org = OrganizationalUnit(conn, IP_OU_KEY) org.replace("seeAlso", "cn=1") # remove the aci domain.ensure_removed("aci", f'(target = "ldap:///{IP_OU_KEY}")(targetattr=\"*\")(version 3.0; aci ' f'"IP aci"; allow(all)userdn = "ldap:///{NETSCAPEIP_KEY}" and ' f'(ip = "127.0.0.1" or ip = "::1" or ip = "{IP}") ;)') # Now add aci with new ip domain.add("aci", f'(target = "ldap:///{IP_OU_KEY}")(targetattr="*")(version 3.0; aci "IP aci"; ' f'allow(all)userdn = "ldap:///{NETSCAPEIP_KEY}" and ip = "100.1.1.1" ;)') # After changing the ip user cant access data with pytest.raises(ldap.INSUFFICIENT_ACCESS): > org.replace("seeAlso", "cn=1") E Failed: DID NOT RAISE <class 'ldap.INSUFFICIENT_ACCESS'> suites/acl/keywords_part2_test.py:93: Failed -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
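This is the one inverted failure in the ACL set: after the ACI is rewritten to allow only ip = "100.1.1.1", the user's modify still succeeds, so pytest.raises reports DID NOT RAISE. A diagnostic sketch that dumps the suffix ACIs just before the negative check can confirm whether the permissive ACI was really removed; using it here is only a suggested check, not the suite's code:

```python
# Diagnostic sketch (assumed, not part of the test): list the ACIs on
# the suffix right before the negative assertion, to verify that only
# the ip = "100.1.1.1" ACI remains in effect. 'topo' is the fixture.
from lib389._constants import DEFAULT_SUFFIX
from lib389.idm.domain import Domain

for aci in Domain(topo.standalone, DEFAULT_SUFFIX).get_attr_vals_utf8("aci"):
    print(aci)
```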
Failed | suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_any_machine | 0.30 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99f3094c0> add_user = None, aci_of_user = None @pytest.mark.flaky(max_runs=2, min_passes=1) def test_user_can_access_the_data_when_connecting_from_any_machine( topo, add_user, aci_of_user ): """User can access the data when connecting from any machine as per the ACI. :id: 28cbc008-7ac5-11e8-934e-8c16451d917b :customerscenario: True :setup: Standalone Server :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ # Add ACI Domain(topo.standalone, DEFAULT_SUFFIX)\ .add("aci", f'(target ="ldap:///{DNS_OU_KEY}")' f'(targetattr="*")(version 3.0; aci "DNS aci"; allow(all) ' f'userdn = "ldap:///{FULLDNS_KEY}" and dns = "*" ;)') # Create a new connection for this test. conn = UserAccount(topo.standalone, FULLDNS_KEY).bind(PW_DM) # Perform Operation > OrganizationalUnit(conn, DNS_OU_KEY).replace("seeAlso", "cn=1") suites/acl/keywords_test.py:254: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:281: in replace self.set(key, value, action=ldap.MOD_REPLACE) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:447: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99e543730> func = <built-in method result4 of LDAP object at 0x7ff99e6e83c0> args = (3, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=dns,ou=keywords,dc=example,dc=com'.\n"} 
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
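This and the remaining keywords_test failures all involve a dns-keyword ACI denying a user it should admit. Since even dns = "*" requires the server to reverse-resolve the client address to some hostname, a plausible first check is whether forward and reverse DNS work on the test host; the diagnosis itself is an assumption, and the sketch below uses only the standard library:

```python
# Diagnostic sketch: verify that the test host's name resolves forward
# and that the address reverse-resolves to a name the ACI's dns
# expression could match.
import socket

hostname = socket.gethostname()
print(f"hostname={hostname} fqdn={socket.getfqdn()}")
try:
    addr = socket.gethostbyname(hostname)
    print(f"address={addr} reverse={socket.gethostbyaddr(addr)[0]}")
except OSError as exc:  # gaierror/herror both derive from OSError
    print(f"resolution failed: {exc}")
```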
Failed | suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_internal_ds_network_only | 0.31 | |
Same failure pattern as test_user_can_access_the_data_when_connecting_from_any_machine above (:id: 2cac2136-7ac5-11e8-8328-8c16451d917b). The test adds the ACI (target = "ldap:///{DNS_OU_KEY}")(targetattr="*")(version 3.0; aci "DNS aci"; allow(all) userdn = "ldap:///{SUNDNS_KEY}" and (dns = "*redhat.com" or dns = "{dns_name}");), binds as the user, and the replace of 'seeAlso' at suites/acl/keywords_test.py:287 is rejected with ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=dns,ou=keywords,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
Failed | suites/acl/keywords_test.py::test_user_can_access_the_data_when_connecting_from_some_network_only | 0.30 | |
Same failure pattern as test_user_can_access_the_data_when_connecting_from_any_machine above (:id: 3098512a-7ac5-11e8-af85-8c16451d917b). The test adds the ACI (target = "ldap:///{DNS_OU_KEY}")(targetattr="*")(version 3.0; aci "DNS aci"; allow(all) userdn = "ldap:///{NETSCAPEDNS_KEY}" and dns = "{dns_name}" ;), binds as the user, and the replace of 'seeAlso' at suites/acl/keywords_test.py:319 is rejected with ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=dns,ou=keywords,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
Failed | suites/acl/keywords_test.py::test_from_an_unauthorized_network | 0.33 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99f3094c0> add_user = None, aci_of_user = None @pytest.mark.flaky(max_runs=2, min_passes=1) def test_from_an_unauthorized_network(topo, add_user, aci_of_user): """User cannot access the data when connecting from an unauthorized network as per the ACI. :id: 34cf9726-7ac5-11e8-bc12-8c16451d917b :customerscenario: True :setup: Standalone Server :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ # Add ACI Domain(topo.standalone, DEFAULT_SUFFIX).\ add("aci", f'(target = "ldap:///{DNS_OU_KEY}")' f'(targetattr="*")(version 3.0; aci "DNS aci"; allow(all) ' f'userdn = "ldap:///{NETSCAPEDNS_KEY}" and dns != "red.iplanet.com" ;)') # Create a new connection for this test. conn = UserAccount(topo.standalone, NETSCAPEDNS_KEY).bind(PW_DM) # Perform Operation > OrganizationalUnit(conn, DNS_OU_KEY).replace("seeAlso", "cn=1") suites/acl/keywords_test.py:347: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:281: in replace self.set(key, value, action=ldap.MOD_REPLACE) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:447: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99ea5bf40> func = <built-in method result4 of LDAP object at 0x7ff99f508cc0> args = (3, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 
'ou=dns,ou=keywords,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
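The INSUFFICIENT_ACCESS above means the dns != "red.iplanet.com" clause of the bind rule did not evaluate true for this connection; a dns-based bind rule generally needs the client's address to reverse-resolve to a usable hostname, so a CI host whose address resolves to something like localhost.localdomain can be denied regardless of the rule's right-hand side. A stdlib-only probe of what the server has to work with, as a sketch (the loopback address is an assumption about how the suite connects):

    import socket

    # Name a loopback client connection will reverse-resolve to
    # (raises socket.herror if there is no reverse mapping at all).
    print(socket.gethostbyaddr('127.0.0.1')[0])
    # Fully-qualified name of the test host itself.
    print(socket.getfqdn())

If the first call fails or prints localhost.localdomain, the resolution environment, not the ACI syntax, is the thing under suspicion.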
Failed | suites/acl/keywords_test.py::test_user_cannot_access_the_data_when_connecting_from_an_unauthorized_network_2 | 0.36 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99f3094c0> add_user = None, aci_of_user = None @pytest.mark.flaky(max_runs=2, min_passes=1) def test_user_cannot_access_the_data_when_connecting_from_an_unauthorized_network_2( topo, add_user, aci_of_user): """User cannot access the data when connecting from an unauthorized network as per the ACI. :id: 396bdd44-7ac5-11e8-8014-8c16451d917b :customerscenario: True :setup: Standalone Server :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ # Add ACI Domain(topo.standalone, DEFAULT_SUFFIX).\ add("aci", f'(target = "ldap:///{DNS_OU_KEY}")' f'(targetattr="*")(version 3.0; aci "DNS aci"; allow(all) ' f'userdn = "ldap:///{NETSCAPEDNS_KEY}" ' f'and dnsalias != "www.redhat.com" ;)') # Create a new connection for this test. conn = UserAccount(topo.standalone, NETSCAPEDNS_KEY).bind(PW_DM) # Perform Operation > OrganizationalUnit(conn, DNS_OU_KEY).replace("seeAlso", "cn=1") suites/acl/keywords_test.py:377: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:281: in replace self.set(key, value, action=ldap.MOD_REPLACE) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:447: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99d1719a0> func = <built-in method result4 of LDAP object at 0x7ff99ea44db0> args = (3, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 3, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' 
attribute of entry 'ou=dns,ou=keywords,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
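The dnsalias variant fails the same way, which fits the same root cause: dnsalias differs from dns in that it is also meant to match alias (CNAME) names of the client's reverse-resolved hostname, so it carries the same resolution dependency. A sketch of inspecting both the canonical name and its aliases for a client address (the address below is hypothetical):

    import socket

    # gethostbyaddr returns (canonical_name, alias_list, address_list).
    host, aliases, _ = socket.gethostbyaddr('10.0.139.64')  # hypothetical client IP
    print(host, aliases)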
Failed | suites/clu/repl_monitor_test.py::test_dsconf_replication_monitor | 68.04 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99d27e370> set_log_file = None @pytest.mark.flaky(max_runs=2, min_passes=1) @pytest.mark.ds50545 @pytest.mark.bz1739718 @pytest.mark.skipif(ds_is_older("1.4.0"), reason="Not implemented") def test_dsconf_replication_monitor(topology_m2, set_log_file): """Test replication monitor that was ported from legacy tools :id: ce48020d-7c30-41b7-8f68-144c9cd757f6 :setup: 2 MM topology :steps: 1. Create DS instance 2. Run replication monitor with connections option 3. Run replication monitor with aliases option 4. Run replication monitor with --json option 5. Run replication monitor with .dsrc file created 6. Run replication monitor with connections option as if using dsconf CLI :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success """ m1 = topology_m2.ms["supplier1"] m2 = topology_m2.ms["supplier2"] # Enable ldapi if not already done. for inst in [topology_m2.ms["supplier1"], topology_m2.ms["supplier2"]]: if not inst.can_autobind(): # Update ns-slapd instance inst.config.set('nsslapd-ldapilisten', 'on') inst.config.set('nsslapd-ldapiautobind', 'on') inst.restart() # Ensure that updates have been sent both ways. replicas = Replicas(m1) replica = replicas.get(DEFAULT_SUFFIX) replica.test_replication([m2]) replicas = Replicas(m2) replica = replicas.get(DEFAULT_SUFFIX) replica.test_replication([m1]) alias_content = ['Supplier: M1 (' + m1.host + ':' + str(m1.port) + ')', 'Supplier: M2 (' + m2.host + ':' + str(m2.port) + ')'] connection_content = 'Supplier: '+ m1.host + ':' + str(m1.port) content_list = ['Replica Root: dc=example,dc=com', 'Replica ID: 1', 'Replica Status: Available', 'Max CSN', 'Status For Agreement: "002" ('+ m2.host + ':' + str(m2.port) + ')', 'Replica Enabled: on', 'Update In Progress: FALSE', 'Last Update Start:', 'Last Update End:', 'Number Of Changes Sent:', 'Number Of Changes Skipped: None', 'Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded', 'Last Init Start:', 'Last Init End:', 'Last Init Status:', 'Reap Active: 0', 'Replication Status: In Synchronization', 'Replication Lag Time:', 'Supplier: ', m2.host + ':' + str(m2.port), 'Replica Root: dc=example,dc=com', 'Replica ID: 2', 'Status For Agreement: "001" (' + m1.host + ':' + str(m1.port)+')'] error_list = ['consumer (Unavailable)', 'Failed to retrieve database RUV entry from consumer'] json_list = ['type', 'list', 'items', 'name', m1.host + ':' + str(m1.port), 'data', '"replica_id": "1"', '"replica_root": "dc=example,dc=com"', '"replica_status": "Available"', 'maxcsn', 'agmts_status', 'agmt-name', '002', 'replica', m2.host + ':' + str(m2.port), 'replica-enabled', 'update-in-progress', 'last-update-start', 'last-update-end', 'number-changes-sent', 'number-changes-skipped', 'last-update-status', 'Error (0) Replica acquired successfully: Incremental update succeeded', 'last-init-start', 'last-init-end', 'last-init-status', 'reap-active', 'replication-status', 'In Synchronization', 'replication-lag-time', '"replica_id": "2"', '001', m1.host + ':' + str(m1.port)] connections = [m1.host + ':' + str(m1.port) + ':' + DN_DM + ':' + PW_DM, m2.host + ':' + str(m2.port) + ':' + DN_DM + ':' + PW_DM] args = FakeArgs() args.connections = connections args.aliases = None args.json = False log.info('Run replication monitor with connections option') get_repl_monitor_info(m1, DEFAULT_SUFFIX, log, args) (host_m1, host_m2) = get_hostnames_from_log(m1.port, m2.port) > check_value_in_log_and_reset(content_list, 
connection_content, error_list=error_list) suites/clu/repl_monitor_test.py:213: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ content_list = ['Replica Root: dc=example,dc=com', 'Replica ID: 1', 'Replica Status: Available', 'Max CSN', 'Status For Agreement: "002" (ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002)', 'Replica Enabled: on', ...] second_list = 'Supplier: ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001' single_value = None error_list = ['consumer (Unavailable)', 'Failed to retrieve database RUV entry from consumer'] def check_value_in_log_and_reset(content_list, second_list=None, single_value=None, error_list=None): with open(LOG_FILE, 'r+') as f: file_content = f.read() for item in content_list: log.info('Check that "{}" is present'.format(item)) > assert item in file_content E AssertionError: assert 'Replica Status: Available' in 'Run replication monitor with connections option\ndsrc path: /root/.dsrc\ndsrc container path: /data/config/container....nsumer (60badf3a000400020000) State (green) Reason (Replication still in progress)\nReplication Lag Time: 00:00:00\n\n' suites/clu/repl_monitor_test.py:54: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f253cec9-ab3b-46a2-8574-2690bda8417e / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 954208b5-2d33-4299-8fff-94e457653fbc / got description=f253cec9-ab3b-46a2-8574-2690bda8417e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:210 Run replication monitor with connections option DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:76 dsrc path: /root/.dsrc DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:77 dsrc container path: /data/config/container.inf DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:85 dsrc instances: [] DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:212 dsrc completed with {'connections': None, 'aliases': None} INFO tests.suites.clu.repl_monitor_test:replication.py:446 Supplier: localhost.localdomain:39001 INFO tests.suites.clu.repl_monitor_test:replication.py:450 ------------------------------------- INFO tests.suites.clu.repl_monitor_test:replication.py:464 Replica Root: dc=example,dc=com INFO tests.suites.clu.repl_monitor_test:replication.py:465 Replica ID: 1 INFO tests.suites.clu.repl_monitor_test:replication.py:466 Replica Status: Online INFO tests.suites.clu.repl_monitor_test:replication.py:467 Max CSN: 60badf11000000010000 INFO tests.suites.clu.repl_monitor_test:replication.py:470 Status For Agreement: "002" (ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002) Replica Enabled: on Update In Progress: FALSE Last Update Start: 20210605021900Z Last Update End: 20210605021900Z Number Of Changes Sent: 1:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update succeeded Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO tests.suites.clu.repl_monitor_test:replication.py:446 Supplier: ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO tests.suites.clu.repl_monitor_test:replication.py:450 ---------------------------------------------------------------- INFO tests.suites.clu.repl_monitor_test:replication.py:464 Replica Root: dc=example,dc=com INFO tests.suites.clu.repl_monitor_test:replication.py:465 Replica ID: 2 INFO tests.suites.clu.repl_monitor_test:replication.py:466 Replica Status: Online INFO tests.suites.clu.repl_monitor_test:replication.py:467 Max CSN: 60badf14000000020000 INFO tests.suites.clu.repl_monitor_test:replication.py:470 Status For Agreement: "001" (ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001) Replica Enabled: on Update In Progress: TRUE Last Update Start: 20210605021900Z Last Update End: 19700101000000Z Number Of Changes Sent: 2:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update started Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:53 Check that "Replica Root: dc=example,dc=com" is present INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:53 Check that "Replica ID: 1" is present INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:53 Check that "Replica Status: Available" is present -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:38 Delete files 
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d7261df9-5371-4be3-aa88-3f2de899c382 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4fa8eb03-e1e0-4c4e-903b-e8d60a321729 / got description=d7261df9-5371-4be3-aa88-3f2de899c382) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:210 Run replication monitor with connections option DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:76 dsrc path: /root/.dsrc DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:77 dsrc container path: /data/config/container.inf DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:85 dsrc instances: [] DEBUG tests.suites.clu.repl_monitor_test:dsrc.py:212 dsrc completed with {'connections': None, 'aliases': None} INFO tests.suites.clu.repl_monitor_test:replication.py:446 Supplier: localhost.localdomain:39001 INFO tests.suites.clu.repl_monitor_test:replication.py:450 ------------------------------------- INFO tests.suites.clu.repl_monitor_test:replication.py:464 Replica Root: dc=example,dc=com INFO tests.suites.clu.repl_monitor_test:replication.py:465 Replica ID: 1 INFO tests.suites.clu.repl_monitor_test:replication.py:466 Replica Status: Online INFO tests.suites.clu.repl_monitor_test:replication.py:467 Max CSN: 60badf3a000000010000 INFO tests.suites.clu.repl_monitor_test:replication.py:470 Status For Agreement: "002" (ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002) Replica Enabled: on Update In Progress: TRUE Last Update Start: 20210605021938Z Last Update End: 19700101000000Z Number Of Changes Sent: 1:2/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update started Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: In Synchronization Replication Lag Time: 00:00:00 INFO tests.suites.clu.repl_monitor_test:replication.py:446 Supplier: ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO tests.suites.clu.repl_monitor_test:replication.py:450 ---------------------------------------------------------------- INFO tests.suites.clu.repl_monitor_test:replication.py:464 Replica Root: dc=example,dc=com INFO tests.suites.clu.repl_monitor_test:replication.py:465 Replica ID: 2 INFO tests.suites.clu.repl_monitor_test:replication.py:466 Replica Status: Online INFO tests.suites.clu.repl_monitor_test:replication.py:467 Max CSN: 60badf3a000600020000 INFO tests.suites.clu.repl_monitor_test:replication.py:470 Status For Agreement: "001" (ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001) Replica Enabled: on Update In Progress: TRUE Last Update Start: 20210605021938Z Last Update End: 19700101000000Z Number Of Changes Sent: 2:1/0 Number Of Changes Skipped: None Last Update Status: Error (0) Replica acquired successfully: Incremental update started Last Init Start: 19700101000000Z Last Init End: 19700101000000Z Last Init Status: unavailable Reap Active: 0 Replication Status: Not in Synchronization: supplier (60badf3a000600020000) consumer (60badf3a000400020000) State (green) Reason (Replication still in progress) Replication Lag Time: 00:00:00 INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:53 Check that "Replica Root: dc=example,dc=com" is present INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:53 Check that "Replica ID: 1" is present INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:53 Check that "Replica Status: Available" is present -----------------------------Captured log 
teardown------------------------------ INFO tests.suites.clu.repl_monitor_test:repl_monitor_test.py:38 Delete files | |||
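Two mismatches are visible in the capture itself: the monitor now prints "Replica Status: Online" while content_list still expects "Replica Status: Available", and the first supplier is reported as localhost.localdomain:39001 rather than the FQDN-based connection_content string the test built. The stale status wording alone is enough to trip the first assertion. A tolerant form of the check inside check_value_in_log_and_reset, as a sketch:

    # Accept both the legacy and the current wording of the replica status.
    ACCEPTED_STATUS = ('Replica Status: Available', 'Replica Status: Online')
    assert any(status in file_content for status in ACCEPTED_STATUS)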
Failed | suites/dynamic_plugins/dynamic_plugins_test.py::test_acceptance | 24.21 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99d0139a0> @pytest.mark.flaky(max_runs=2, min_passes=1) def test_acceptance(topology_m2): """Exercise each plugin and its main features, while changing the configuration without restarting the server. :id: 96136538-0151-4b09-9933-0e0cbf2c786c :setup: 2 Supplier Instances :steps: 1. Pause all replication 2. Set nsslapd-dynamic-plugins to on 3. Try to update LDBM config entry 4. Go through all plugin basic functionality 5. Resume replication 6. Go through all plugin basic functionality again 7. Check that data in sync and replication is working :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success 7. Success """ m1 = topology_m2.ms["supplier1"] msg = ' (no replication)' replication_run = False # First part of the test should be without replication > topology_m2.pause_all_replicas() suites/dynamic_plugins/dynamic_plugins_test.py:102: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/topologies.py:219: in pause_all_replicas replica = replicas.get(DEFAULT_SUFFIX) /usr/local/lib/python3.9/site-packages/lib389/replica.py:1765: in get replica = super(Replicas, self).get(selector, dn) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1146: in get results = self._get_selector(selector) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1177: in _get_selector return self._instance.search_ext_s( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99cf490a0> func = <built-in method search_ext of LDAP object at 0x7ff99fd6e720> args = ('cn=mapping tree,cn=config', 2, '(&(&(objectclass=nsds5Replica))(|(nsDS5ReplicaRoot=dc=example,dc=com)))', ['dn'], 0, None, ...) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 43f47154-a8bc-4573-a199-13276ae7ac05 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 57c86aea-cb66-4836-929f-0f96517da396 / got description=43f47154-a8bc-4573-a199-13276ae7ac05) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
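ldap.SERVER_DOWN on the very first pause_all_replicas() call means the LDAP connection was already gone before the test body did anything, i.e. the instance stopped or crashed during or after setup. A minimal liveness probe to run before touching replica configuration, assuming the standard lib389 DirSrv API (status/start/open):

    # Make sure both suppliers are running and rebind the connections
    # that python-ldap reported as down.
    for inst in topology_m2.ms.values():
        if not inst.status():
            inst.start()
        inst.open()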
Failed | suites/dynamic_plugins/dynamic_plugins_test.py::test_memory_corruption | 0.13 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99d0139a0> @pytest.mark.flaky(max_runs=2, min_passes=1) def test_memory_corruption(topology_m2): """Check the plugins for memory corruption issues while dynamic plugins option is enabled :id: 96136538-0151-4b09-9933-0e0cbf2c7862 :setup: 2 Supplier Instances :steps: 1. Pause all replication 2. Set nsslapd-dynamic-plugins to on 3. Try to update LDBM config entry 4. Restart the plugin many times in a linked list fashion restarting previous and preprevious plugins in the list of all plugins 5. Run the functional test 6. Repeat 4 and 5 steps for all plugins 7. Resume replication 8. Go through 4-6 steps once more 9. Check that data in sync and replication is working :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success 7. Success 8. Success 9. Success """ m1 = topology_m2.ms["supplier1"] msg = ' (no replication)' replication_run = False # First part of the test should be without replication > topology_m2.pause_all_replicas() suites/dynamic_plugins/dynamic_plugins_test.py:181: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/topologies.py:219: in pause_all_replicas replica = replicas.get(DEFAULT_SUFFIX) /usr/local/lib/python3.9/site-packages/lib389/replica.py:1765: in get replica = super(Replicas, self).get(selector, dn) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1146: in get results = self._get_selector(selector) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1177: in _get_selector return self._instance.search_ext_s( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99cf490a0> func = <built-in method search_ext of LDAP object at 0x7ff99fd6e720> args = ('cn=mapping tree,cn=config', 2, '(&(&(objectclass=nsds5Replica))(|(nsDS5ReplicaRoot=dc=example,dc=com)))', ['dn'], 0, None, ...) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN | |||
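test_memory_corruption fails identically because it reuses the same module-scoped topology object (the same instance at 0x7ff99d0139a0) whose server went away in test_acceptance. The traceback shows the exact search that pause_all_replicas() issues first; running it by hand (slightly simplified here) separates "server unreachable" from "replica entry missing":

    import ldap

    m1 = topology_m2.ms["supplier1"]
    m1.search_ext_s('cn=mapping tree,cn=config', ldap.SCOPE_SUBTREE,
                    '(&(objectclass=nsds5Replica)(nsDS5ReplicaRoot=dc=example,dc=com))',
                    ['dn'])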
Failed | suites/dynamic_plugins/dynamic_plugins_test.py::test_stress | 30.88 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99ca68eb0> @pytest.mark.flaky(max_runs=2, min_passes=1) @pytest.mark.tier2 def test_stress(topology_m2): """Test plugins while under a big load. Perform the test 5 times :id: 96136538-0151-4b09-9933-0e0cbf2c7863 :setup: 2 Supplier Instances :steps: 1. Pause all replication 2. Set nsslapd-dynamic-plugins to on 3. Try to update LDBM config entry 4. Do one run through all tests 5. Enable Referential integrity and MemberOf plugins 6. Launch three new threads to add a bunch of users 7. While we are adding users restart the MemberOf and Linked Attributes plugins many times 8. Wait for the 'adding' threads to complete 9. Now launch three threads to delete the users 10. Restart both the MemberOf, Referential integrity and Linked Attributes plugins during these deletes 11. Wait for the 'deleting' threads to complete 12. Now make sure both the MemberOf and Referential integrity plugins still work correctly 13. Cleanup the stress tests (delete the group entry) 14. Perform 4-13 steps five times 15. Resume replication 16. Go through 4-14 steps once more 17. Check that data in sync and replication is working :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success 7. Success 8. Success 9. Success 10. Success 11. Success 12. Success 13. Success 14. Success 15. Success 16. Success 17. Success """ m1 = topology_m2.ms["supplier1"] msg = ' (no replication)' replication_run = False stress_max_runs = 5 # First part of the test should be without replication topology_m2.pause_all_replicas() # First enable dynamic plugins m1.config.replace('nsslapd-dynamic-plugins', 'on') # Test that critical plugins can be updated even though the change might not be applied ldbm_config = LDBMConfig(m1) ldbm_config.replace('description', 'test') while True: # First run the tests with replication disabled, then rerun them with replication set up log.info('Do one run through all tests ' + msg + '...') > acceptance_test.check_all_plugins(topology_m2) suites/dynamic_plugins/dynamic_plugins_test.py:314: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ suites/plugins/acceptance_test.py:1807: in check_all_plugins func(topo, args) suites/plugins/acceptance_test.py:477: in test_automember task = plugin.fixup(branch2.dn, _filter='objectclass=top') /usr/local/lib/python3.9/site-packages/lib389/plugins.py:1087: in fixup task.create(properties=task_properties) /usr/local/lib/python3.9/site-packages/lib389/tasks.py:118: in create return super(Task, self).create(rdn, properties, basedn) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:972: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:947: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return 
f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99ca68670> func = <built-in method result4 of LDAP object at 0x7ff99d06b7b0> args = (133, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d1dea6d7-2e9a-4882-86e0-34e58819b6de / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3c660f64-9d96-424c-8800-8ffadfcf58bc / got description=d1dea6d7-2e9a-4882-86e0-34e58819b6de) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
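Unlike the two failures above, here the server stayed reachable long enough to run part of the plugin acceptance pass (the failing result4 call is for message id 133) and dropped while an automember fixup task entry was being added, which points at a crash mid-run rather than a setup problem. A post-mortem sketch, assuming lib389's DirSrv error-log helper ds_error_log and a purely illustrative pattern:

    # If the instance is down, look for a crash signature before rerunning.
    if not m1.status():
        for line in m1.ds_error_log.match('.*(CRIT|ERR).*'):
            print(line)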
Failed | suites/healthcheck/health_security_test.py::test_healthcheck_certif_expiring_within_30d | 10.08 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99ce1bd30> @pytest.mark.ds50873 @pytest.mark.bz1685160 @pytest.mark.xfail(ds_is_older("1.4.1"), reason="Not implemented") def test_healthcheck_certif_expiring_within_30d(topology_st): """Check if HealthCheck returns DSCERTLE0001 code :id: c2165032-88ba-4978-a4ca-2fecfd8c35d8 :setup: Standalone instance :steps: 1. Create DS instance 2. Use libfaketime to tell the process the date is within 30 days before certificate expiration 3. Use HealthCheck without --json option 4. Use HealthCheck with --json option :expectedresults: 1. Success 2. Success 3. Healthcheck reports DSCERTLE0001 code and related details 4. Healthcheck reports DSCERTLE0001 code and related details """ RET_CODE = 'DSCERTLE0001' standalone = topology_st.standalone standalone.enable_tls() # Cert is valid two years from today, so we count the date that is within 30 days before certificate expiration date_future = datetime.now() + timedelta(days=701) with libfaketime.fake_time(date_future): time.sleep(1) > run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) suites/healthcheck/health_security_test.py:304: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7ff99ce1bd30> instance = <lib389.DirSrv object at 0x7ff99ce1bf70> searched_code = 'DSCERTLE0001', json = False, searched_code2 = None def run_healthcheck_and_flush_log(topology, instance, searched_code, json, searched_code2=None): args = FakeArgs() args.instance = instance.serverid args.verbose = instance.verbose args.list_errors = False args.list_checks = False args.check = ['config', 'encryption', 'tls', 'fschecks'] args.dry_run = False if json: log.info('Use healthcheck with --json option') args.json = json health_check_run(instance, topology.logcap.log, args) assert topology.logcap.contains(searched_code) log.info('Healthcheck returned searched code: %s' % searched_code) if searched_code2 is not None: assert topology.logcap.contains(searched_code2) log.info('Healthcheck returned searched code: %s' % searched_code2) else: log.info('Use healthcheck without --json option') args.json = json health_check_run(instance, topology.logcap.log, args) > assert topology.logcap.contains(searched_code) E AssertionError: assert False E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSCERTLE0001') E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7ff99ce1bd30>.logcap suites/healthcheck/health_security_test.py:67: AssertionError -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. | |||
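At now+701 days the healthcheck ran all five checks and reported no issues, although the test comment says the certificates from enable_tls() are valid for two years, which should put a +701-day clock inside the 30-day warning window. The simplest way to remove the guesswork is to read the real Not After date from the instance's NSS database; the path below is the conventional location for this serverid and is an assumption:

    import subprocess

    # Print certificate details, including the validity window, for the
    # server certificate (database path is an assumption for "standalone1").
    subprocess.run(['certutil', '-L', '-d', '/etc/dirsrv/slapd-standalone1',
                    '-n', 'Server-Cert'])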
Failed | suites/healthcheck/health_security_test.py::test_healthcheck_certif_expired | 13.72 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99ce1bd30> @pytest.mark.ds50873 @pytest.mark.bz1685160 @pytest.mark.xfail(ds_is_older("1.4.1"), reason="Not implemented") def test_healthcheck_certif_expired(topology_st): """Check if HealthCheck returns DSCERTLE0002 code :id: ceff2c22-62c0-4fd9-b737-930a88458d68 :setup: Standalone instance :steps: 1. Create DS instance 2. Use libfaketime to tell the process the date is after certificate expiration 3. Use HealthCheck without --json option 4. Use HealthCheck with --json option :expectedresults: 1. Success 2. Success 3. Healthcheck reports DSCERTLE0002 code and related details 4. Healthcheck reports DSCERTLE0002 code and related details """ RET_CODE = 'DSCERTLE0002' standalone = topology_st.standalone standalone.enable_tls() # Cert is valid two years from today, so we count the date that is after expiration date_future = datetime.now() + timedelta(days=731) with libfaketime.fake_time(date_future): time.sleep(1) > run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) suites/healthcheck/health_security_test.py:343: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7ff99ce1bd30> instance = <lib389.DirSrv object at 0x7ff99ce1bf70> searched_code = 'DSCERTLE0002', json = False, searched_code2 = None def run_healthcheck_and_flush_log(topology, instance, searched_code, json, searched_code2=None): args = FakeArgs() args.instance = instance.serverid args.verbose = instance.verbose args.list_errors = False args.list_checks = False args.check = ['config', 'encryption', 'tls', 'fschecks'] args.dry_run = False if json: log.info('Use healthcheck with --json option') args.json = json health_check_run(instance, topology.logcap.log, args) assert topology.logcap.contains(searched_code) log.info('Healthcheck returned searched code: %s' % searched_code) if searched_code2 is not None: assert topology.logcap.contains(searched_code2) log.info('Healthcheck returned searched code: %s' % searched_code2) else: log.info('Use healthcheck without --json option') args.json = json health_check_run(instance, topology.logcap.log, args) > assert topology.logcap.contains(searched_code) E AssertionError: assert False E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSCERTLE0002') E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7ff99ce1bd30>.logcap suites/healthcheck/health_security_test.py:67: AssertionError -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 2 Issues found! Generating report ... 
INFO LogCapture:health.py:45 [1] DS Lint Error: DSCERTLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: tls:certificate_expiration INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Expiring Certificate INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The certificate (Self-Signed-CA) will expire in less than 30 days INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Renew the certificate before it expires to prevent disruptions with TLS connections. INFO LogCapture:health.py:45 [2] DS Lint Error: DSCERTLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: tls:certificate_expiration INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Expiring Certificate INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The certificate (Server-Cert) will expire in less than 30 days INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Renew the certificate before it expires to prevent disruptions with TLS connections. INFO LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== | |||
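The capture for this run is the more telling one: at now+731 days the healthcheck did fire, but it reported DSCERTLE0001 (expiring within 30 days) twice, for Self-Signed-CA and Server-Cert, instead of the expected DSCERTLE0002 (expired). If the lifetime really is two years, the arithmetic says +731 days must be past expiry (ignoring leap days), so either the issued lifetime is longer than the comment assumes or the faked clock did not reach every part of the check. A sanity sketch of the windows:

    from datetime import datetime, timedelta

    issued = datetime.now()
    not_after = issued + timedelta(days=730)  # two-year lifetime per the test comment
    probe = issued + timedelta(days=731)      # the faked "now" used by the test
    assert probe > not_after                  # so DSCERTLE0002 was the right expectation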
Failed | suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[fqdn] | 4.45 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99c45fdf0> create_user = <lib389.idm.user.UserAccount object at 0x7ff99c45f6d0> aci_subject = 'dns = "localhost.localdomain"' @pytest.mark.flaky(max_runs=2, min_passes=1) @pytest.mark.parametrize('aci_subject', ('dns = "{}"'.format(HOSTNAME), 'ip = "{}"'.format(IP_ADDRESS)), ids=['fqdn','ip']) def test_search_dns_ip_aci(topology_st, create_user, aci_subject): """Verify that after performing multiple simple paged searches to completion on the suffix with DNS or IP based ACI :id: bbfddc46-a8c8-49ae-8c90-7265d05b22a9 :customerscenario: True :parametrized: yes :setup: Standalone instance, test user for binding, varying number of users for the search base :steps: 1. Back up and remove all previous ACI from suffix 2. Add an anonymous ACI for DNS check 3. Bind as test user 4. Search through added users with a simple paged control 5. Perform steps 4 three times in a row 6. Return ACI to the initial state 7. Go through all steps once again, but use IP subject dn instead of DNS :expectedresults: 1. Operation should be successful 2. Anonymous ACI should be successfully added 3. Bind should be successful 4. No error happens, all users should be found and sorted 5. Results should remain the same 6. ACI should be successfully returned 7. Results should be the same with ACI with IP subject dn """ users_num = 100 page_size = 5 users_list = add_users(topology_st, users_num, DEFAULT_SUFFIX) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] try: log.info('Back up current suffix ACI') acis_bck = topology_st.standalone.aci.list(DEFAULT_SUFFIX, ldap.SCOPE_BASE) log.info('Add test ACI') ACI_TARGET = '(targetattr != "userPassword")' ACI_ALLOW = '(version 3.0;acl "Anonymous access within domain"; allow (read,compare,search)' ACI_SUBJECT = '(userdn = "ldap:///anyone") and (%s);)' % aci_subject ACI_BODY = ensure_bytes(ACI_TARGET + ACI_ALLOW + ACI_SUBJECT) topology_st.standalone.modify_s(DEFAULT_SUFFIX, [(ldap.MOD_REPLACE, 'aci', ACI_BODY)]) log.info('Set user bind') conn = create_user.bind(TEST_USER_PWD, uri=f'ldap://{IP_ADDRESS}:{topology_st.standalone.port}') log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] log.info('Initiate three searches with a paged results control') for ii in range(3): log.info('%d search' % (ii + 1)) all_results = paged_search(conn, DEFAULT_SUFFIX, controls, search_flt, searchreq_attrlist) log.info('%d results' % len(all_results)) > assert len(all_results) == len(users_list) E assert 0 == 100 E +0 E -100 suites/paged_results/paged_results_test.py:576: AssertionError -------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:554 Back up current suffix ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:557 Add test ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:566 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:570 Initiate three searches with a paged results control INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:572 1 search INFO 
tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99cc713d0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:575 0 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:580 Restore ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users -------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:554 Back up current suffix ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:557 Add test ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:566 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:570 Initiate three searches with a paged results control INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:572 1 search INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99cd97100>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:575 0 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:580 Restore ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
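The paged search bound successfully but every page came back empty, which with an anonymous-plus-dns ACI usually means the read grant, not the data, is what failed; note the ACI subject here is dns = "localhost.localdomain" while the bind deliberately goes to ldap://{IP_ADDRESS}, so the outcome depends on how that address reverse-resolves on the server side, the same dependency seen in the keywords_test failures above. A cross-check as Directory Manager, which bypasses ACIs, isolates the cause (fixture names reused from the test):

    import ldap
    from lib389._constants import DEFAULT_SUFFIX

    # If this returns the 100 test entries, the data is present and the
    # dns-based anonymous ACI is what denied the paged search above.
    entries = topology_st.standalone.search_s(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE,
                                              '(uid=test*)', ['sn'])
    print(len(entries))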
Failed | suites/password/regression_test.py::test_unhashed_pw_switch | 30.71 | |
topo_supplier = <lib389.topologies.TopologyMain object at 0x7ff99c8556a0> @pytest.mark.flaky(max_runs=2, min_passes=1) @pytest.mark.ds49789 def test_unhashed_pw_switch(topo_supplier): """Check that nsslapd-unhashed-pw-switch works corrently :id: e5aba180-d174-424d-92b0-14fe7bb0b92a :setup: Supplier Instance :steps: 1. A Supplier is created, enable retrocl (not used here) 2. Create a set of users 3. update userpassword of user1 and check that unhashed#user#password is not logged (default) 4. udpate userpassword of user2 and check that unhashed#user#password is not logged ('nolog') 5. udpate userpassword of user3 and check that unhashed#user#password is logged ('on') :expectedresults: 1. Success 2. Success 3. Success (unhashed#user#password is not logged in the replication changelog) 4. Success (unhashed#user#password is not logged in the replication changelog) 5. Success (unhashed#user#password is logged in the replication changelog) """ MAX_USERS = 10 PEOPLE_DN = ("ou=people," + DEFAULT_SUFFIX) inst = topo_supplier.ms["supplier1"] inst.modify_s("cn=Retro Changelog Plugin,cn=plugins,cn=config", [(ldap.MOD_REPLACE, 'nsslapd-changelogmaxage', b'2m'), (ldap.MOD_REPLACE, 'nsslapd-changelog-trim-interval', b"5s"), (ldap.MOD_REPLACE, 'nsslapd-logAccess', b'on')]) inst.config.loglevel(vals=[256 + 4], service='access') inst.restart() # If you need any test suite initialization, # please, write additional fixture for that (including finalizer). # Topology for suites are predefined in lib389/topologies.py. # enable dynamic plugins, memberof and retro cl plugin # log.info('Enable plugins...') try: inst.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')]) except ldap.LDAPError as e: ldap.error('Failed to enable dynamic plugins! ' + e.message['desc']) assert False #topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) inst.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) #topology_st.standalone.modify_s("cn=changelog,cn=ldbm database,cn=plugins,cn=config", [(ldap.MOD_REPLACE, 'nsslapd-cachememsize', str(100000))]) inst.restart() log.info('create users and group...') for idx in range(1, MAX_USERS): try: USER_DN = ("uid=member%d,%s" % (idx, PEOPLE_DN)) inst.add_s(Entry((USER_DN, {'objectclass': 'top extensibleObject'.split(), 'uid': 'member%d' % (idx)}))) except ldap.LDAPError as e: log.fatal('Failed to add user (%s): error %s' % (USER_DN, e.message['desc'])) assert False # Check default is that unhashed#user#password is not logged on 1.4.1.6+ user = "uid=member1,%s" % (PEOPLE_DN) inst.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', PASSWORD.encode())]) inst.stop() if ds_is_newer('1.4.1.6'): > _check_unhashed_userpw(inst, user, is_present=False) suites/password/regression_test.py:290: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ suites/password/regression_test.py:48: in _check_unhashed_userpw dbscanOut = inst.dbscan(DEFAULT_BENAME, 'replication_changelog') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:3015: in dbscan output = subprocess.check_output(cmd) /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ input = None, capture_output = False, timeout = None, check = True popenargs = (['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db'],) kwargs = {'stdout': -1} process = <Popen: returncode: -6 args: ['/usr/bin/dbscan', '-f', 
'/var/lib/dirsrv/slap...> stdout = b'\ndbid: 0000006f000000000000\n\tentry count: 11\n\ndbid: 000000de000000000000\n\tpurge ruv:\n\t\t{replicageneration}...ber6\n\t\tcreatorsName: cn=directory manager\n\t\tmodifiersName: cn=directory manager\n\t\tcreateTimestamp: 2021060503' stderr = None, retcode = -6 def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs): """Run command with arguments and return a CompletedProcess instance. The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. If check is True and the exit code was non-zero, it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute, and output & stderr attributes if those streams were captured. If timeout is given, and the process takes too long, a TimeoutExpired exception will be raised. There is an optional argument "input", allowing you to pass bytes or a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it will be used internally. By default, all communication is in bytes, and therefore any "input" should be bytes, and the stdout and stderr will be bytes. If in text mode, any "input" should be a string, and stdout and stderr will be strings decoded according to locale encoding, or by "encoding" if set. Text mode is triggered by setting any of text, encoding, errors or universal_newlines. The other arguments are the same as for the Popen constructor. """ if input is not None: if kwargs.get('stdin') is not None: raise ValueError('stdin and input arguments may not both be used.') kwargs['stdin'] = PIPE if capture_output: if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None: raise ValueError('stdout and stderr arguments may not be used ' 'with capture_output.') kwargs['stdout'] = PIPE kwargs['stderr'] = PIPE with Popen(*popenargs, **kwargs) as process: try: stdout, stderr = process.communicate(input, timeout=timeout) except TimeoutExpired as exc: process.kill() if _mswindows: # Windows accumulates the output in a single blocking # read() call run on child threads, with the timeout # being done in a join() on those threads. communicate() # _after_ kill() is required to collect that and add it # to the exception. exc.stdout, exc.stderr = process.communicate() else: # POSIX _communicate already populated the output so # far into the TimeoutExpired exception. process.wait() raise except: # Including KeyboardInterrupt, communicate handled that. process.kill() # We don't call process.wait() as .__exit__ does that for us. raise retcode = process.poll() if check and retcode: > raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) E subprocess.CalledProcessError: Command '['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db']' died with <Signals.SIGABRT: 6>. /usr/lib64/python3.9/subprocess.py:528: CalledProcessError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. 
INFO lib389.topologies:topologies.py:142 Creating replication topology. ------------------------------Captured stderr call------------------------------ free(): double free detected in tcache 2 -------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:257 Enable plugins... INFO tests.suites.password.regression_test:regression_test.py:272 create users and group... INFO lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db'] -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.regression_test:regression_test.py:108 Deleting user-uid=UIDpwtest1,ou=People,dc=example,dc=com INFO tests.suites.password.regression_test:regression_test.py:83 Reset pwpolicy configuration settings -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. ------------------------------Captured stderr call------------------------------ free(): double free detected in tcache 2 -------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:257 Enable plugins... INFO tests.suites.password.regression_test:regression_test.py:272 create users and group... INFO lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db'] | |||
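Triage note on the failure above: dbscan did not merely exit non-zero, it was killed by SIGABRT after glibc detected "free(): double free detected in tcache 2", which subprocess.check_output surfaces as a CalledProcessError with a negative returncode. A minimal sketch for reproducing this by hand and telling a signal death apart from an ordinary non-zero exit (generic Python, not lib389's dbscan() wrapper):

    import signal
    import subprocess

    cmd = ['/usr/bin/dbscan', '-f',
           '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db']
    # No check=True, so a crashing helper does not raise; inspect the
    # returncode ourselves. A negative value is the number of the fatal signal.
    proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if proc.returncode < 0:
        print('dbscan died with', signal.Signals(-proc.returncode).name)
        print(proc.stderr.decode(errors='replace'))  # the glibc double-free report
    else:
        print(proc.stdout.decode(errors='replace'))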
Failed | suites/plugins/managed_entry_test.py::test_mentry01 | 0.15 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99c837430> _create_inital = None @pytest.mark.flaky(max_runs=2, min_passes=1) def test_mentry01(topo, _create_inital): """Test Managed Entries basic functionality :id: 9b87493b-0493-46f9-8364-6099d0e5d806 :setup: Standalone Instance :steps: 1. Check the plug-in status 2. Add Template and definition entry 3. Add our org units 4. Add users with PosixAccount ObjectClass and verify creation of User Private Group 5. Disable the plug-in and check the status 6. Enable the plug-in and check the status; while the plug-in is disabled, creation of UPG should fail 7. Add users with PosixAccount ObjectClass and verify creation of User Private Group 8. Add users, run ModRDN operation and check the User Private group 9. Add users, run LDAPMODIFY to change the gidNumber and check the User Private group 10. Check that creation of User Private group fails for an existing group entry 11. Check that adding the posixAccount objectClass to an existing user creates the UPG 12. Run a ModRDN operation and check the user private group's mepManagedBy attribute 13. Delete the mepManagedBy attribute and run a ModRDN operation to check if it creates a new UPG 14. Change the RDN of the template entry, DSA Unwilling to perform error expected 15. Change the RDN of cn=Users to cn=TestUsers and check the UPGs are deleted :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success 7. Success 8. Success 9. Success 10. Success 11. Success 12. Success 13. Success 14. Fail (Unwilling to perform) 15. Success """ # Check the plug-in status mana = ManagedEntriesPlugin(topo.standalone) assert mana.status() # Add Template and definition entry > org1 = OrganizationalUnits(topo.standalone, DEFAULT_SUFFIX).create(properties={'ou': 'Users'}) suites/plugins/managed_entry_test.py:159: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1215: in create return co.create(rdn, properties, self._basedn) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:972: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:947: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
_ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99c4cd370> func = <built-in method result4 of LDAP object at 0x7ff99c4cc810> args = (56, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.ALREADY_EXISTS: {'msgtype': 105, 'msgid': 56, 'result': 68, 'desc': 'Already exists', 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: ALREADY_EXISTS | |||
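The ALREADY_EXISTS comes from step 2 re-adding ou=Users, most plausibly an entry left behind by the first flaky run of this module. A minimal sketch of an idempotent setup (a hypothetical rewrite, assuming lib389's DSLdapObjects.get() selector behaves as it does elsewhere in this suite; this is not the test's actual code):

    import ldap
    from lib389._constants import DEFAULT_SUFFIX
    from lib389.idm.organizationalunit import OrganizationalUnits

    # topo.standalone is the fixture instance from the test above.
    ous = OrganizationalUnits(topo.standalone, DEFAULT_SUFFIX)
    try:
        org1 = ous.create(properties={'ou': 'Users'})
    except ldap.ALREADY_EXISTS:
        # Reuse the entry left behind by a previous run instead of failing.
        org1 = ous.get('Users')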
Failed | suites/plugins/memberof_test.py::test_memberof_auto_add_oc | 11.41 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99ba69fa0> @pytest.mark.flaky(max_runs=2, min_passes=1) def test_memberof_auto_add_oc(topology_st): """Test the auto add objectclass (OC) feature. The plugin should add a predefined objectclass that will allow memberOf to be added to an entry. :id: d222af17-17a6-48a0-8f22-a38306726a25 :setup: Standalone instance :steps: 1. Enable dynamic plugins 2. Enable memberOf plugin 3. Test that the default add OC works. 4. Add a group that already includes one user 5. Assert memberOf on user1 6. Delete user1 and the group 7. Test invalid value (config validation) 8. Add valid objectclass 9. Add two users 10. Add a group that already includes one user 11. Add a user to the group :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success 7. Success 8. Success 9. Success 10. Success 11. Success """ # enable dynamic plugins try: topology_st.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')]) except ldap.LDAPError as e: ldap.error('Failed to enable dynamic plugins! ' + e.message['desc']) assert False # Enable the plugin topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) # Test that the default add OC works. try: topology_st.standalone.add_s(Entry((USER1_DN, {'objectclass': 'top', 'objectclass': 'person', 'objectclass': 'organizationalPerson', 'objectclass': 'inetorgperson', 'sn': 'last', 'cn': 'full', 'givenname': 'user1', 'uid': 'user1' }))) except ldap.LDAPError as e: log.fatal('Failed to add user1 entry, error: ' + e.message['desc']) assert False # Add a group(that already includes one user try: topology_st.standalone.add_s(Entry((GROUP_DN, {'objectclass': 'top', 'objectclass': 'groupOfNames', 'cn': 'group', 'member': USER1_DN }))) except ldap.LDAPError as e: log.fatal('Failed to add group entry, error: ' + e.message['desc']) assert False # Assert memberOf on user1 _check_memberof(topology_st, USER1_DN, GROUP_DN) # Reset for the next test .... topology_st.standalone.delete_s(USER1_DN) topology_st.standalone.delete_s(GROUP_DN) # Test invalid value (config validation) topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) try: topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofAutoAddOC', b'invalid123')]) log.fatal('Incorrectly added invalid objectclass!') assert False except ldap.UNWILLING_TO_PERFORM: log.info('Correctly rejected invalid objectclass') except ldap.LDAPError as e: ldap.error('Unexpected error adding invalid objectclass - error: ' + e.message['desc']) assert False # Add valid objectclass topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) try: > topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofAutoAddOC', b'inetuser')]) suites/plugins/memberof_test.py:2763: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn=MemberOf Plugin,cn=plugins,cn=config', [(2, 'memberofAutoAddOC', b'inetuser')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7ff99c545840, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 173,...93, function='_hookexec', code_context=[' return self._inner_hookexec(hook, methods, kwargs)\n'], index=0), ...] 
frame = FrameInfo(frame=<frame at 0x5628dd91d0b0, file '/export/tests/suites/plugins/memberof_test.py', line 2769, code test_m...n='test_memberof_auto_add_oc', code_context=[' topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN,\n'], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99f625ca0> dn = 'cn=MemberOf Plugin,cn=plugins,cn=config' modlist = [(2, 'memberofAutoAddOC', b'inetuser')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn=MemberOf Plugin,cn=plugins,cn=config', [(2, 'memberofAutoAddOC', b'inetuser')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99f625ca0> dn = 'cn=MemberOf Plugin,cn=plugins,cn=config' modlist = [(2, 'memberofAutoAddOC', b'inetuser')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (16,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99f625ca0>, msgid = 16, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (16, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99f625ca0>, msgid = 16, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff99d1983f0>, 16, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99f625ca0> func = <built-in method result4 of LDAP object at 0x7ff99d1983f0> args = (16, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.UNWILLING_TO_PERFORM'> exc_value = UNWILLING_TO_PERFORM({'msgtype': 103, 'msgid': 16, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}) exc_traceback = <traceback object at 0x7ff99c4fad80> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99f625ca0> func = <built-in method result4 of LDAP object at 0x7ff99d1983f0> args = (16, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 103, 'msgid': 16, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7ff99ba69fa0> @pytest.mark.flaky(max_runs=2, min_passes=1) def test_memberof_auto_add_oc(topology_st): """Test the auto add objectclass (OC) feature. The plugin should add a predefined objectclass that will allow memberOf to be added to an entry. :id: d222af17-17a6-48a0-8f22-a38306726a25 :setup: Standalone instance :steps: 1. Enable dynamic plugins 2. Enable memberOf plugin 3. Test that the default add OC works. 4. Add a group that already includes one user 5. Assert memberOf on user1 6. Delete user1 and the group 7. Test invalid value (config validation) 8. Add valid objectclass 9. Add two users 10. Add a group that already includes one user 11. Add a user to the group :expectedresults: 1. Success 2. Success 3. Success 4. Success 5. Success 6. Success 7. Success 8. Success 9. Success 10. Success 11. Success """ # enable dynamic plugins try: topology_st.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')]) except ldap.LDAPError as e: ldap.error('Failed to enable dynamic plugins! ' + e.message['desc']) assert False # Enable the plugin topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) # Test that the default add OC works. try: topology_st.standalone.add_s(Entry((USER1_DN, {'objectclass': 'top', 'objectclass': 'person', 'objectclass': 'organizationalPerson', 'objectclass': 'inetorgperson', 'sn': 'last', 'cn': 'full', 'givenname': 'user1', 'uid': 'user1' }))) except ldap.LDAPError as e: log.fatal('Failed to add user1 entry, error: ' + e.message['desc']) assert False # Add a group(that already includes one user try: topology_st.standalone.add_s(Entry((GROUP_DN, {'objectclass': 'top', 'objectclass': 'groupOfNames', 'cn': 'group', 'member': USER1_DN }))) except ldap.LDAPError as e: log.fatal('Failed to add group entry, error: ' + e.message['desc']) assert False # Assert memberOf on user1 _check_memberof(topology_st, USER1_DN, GROUP_DN) # Reset for the next test .... 
topology_st.standalone.delete_s(USER1_DN) topology_st.standalone.delete_s(GROUP_DN) # Test invalid value (config validation) topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) try: topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofAutoAddOC', b'invalid123')]) log.fatal('Incorrectly added invalid objectclass!') assert False except ldap.UNWILLING_TO_PERFORM: log.info('Correctly rejected invalid objectclass') except ldap.LDAPError as e: ldap.error('Unexpected error adding invalid objectclass - error: ' + e.message['desc']) assert False # Add valid objectclass topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) try: topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofAutoAddOC', b'inetuser')]) except ldap.LDAPError as e: > log.fatal('Failed to configure memberOf plugin: error ' + e.message['desc']) E AttributeError: 'UNWILLING_TO_PERFORM' object has no attribute 'message' suites/plugins/memberof_test.py:2768: AttributeError -------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from uid=user1,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:2754 Correctly rejected invalid objectclass -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from uid=user1,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:2754 Correctly rejected invalid objectclass | |||
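The final failure here is the except block, not the directory server: python-ldap 3.x exceptions no longer carry a .message attribute, so e.message['desc'] raises AttributeError and masks the real UNWILLING_TO_PERFORM. The result dict is available as e.args[0] (visible in the traceback above). A minimal sketch of a portable handler (a hypothetical rewrite of the failing except clause; MEMBEROF_PLUGIN_DN and log are the test module's own names):

    import ldap

    try:
        topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN,
                                        [(ldap.MOD_REPLACE, 'memberofAutoAddOC', b'inetuser')])
    except ldap.LDAPError as e:
        # python-ldap >= 3.0: the result dict is e.args[0]; 'desc' holds the text.
        desc = e.args[0].get('desc', str(e)) if e.args else str(e)
        log.fatal('Failed to configure memberOf plugin: error ' + desc)
        assert False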
Failed | suites/replication/encryption_cl5_test.py::test_algorithm_unhashed | 99.40 | |
topology_with_tls = <lib389.topologies.TopologyMain object at 0x7ff99c6cf670> @pytest.mark.flaky(max_runs=2, min_passes=1) def test_algorithm_unhashed(topology_with_tls): """Check encryption algorithm AES and check unhashed#user#password attribute for encryption. :id: b7a37bf8-4b2e-4dbd-9891-70117d67558c :parametrized: yes :setup: Replication with two suppliers and SSL configured. :steps: 1. Enable changelog encryption on supplier1 2. Add a user to supplier1/supplier2 3. Run dbscan -f on m1 to check unhashed#user#password attribute is encrypted. 4. Run dbscan -f on m2 to check unhashed#user#password attribute is in cleartext. 5. Modify password in supplier2/supplier1 6. Run dbscan -f on m1 to check unhashed#user#password attribute is encrypted. 7. Run dbscan -f on m2 to check unhashed#user#password attribute is in cleartext. :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass """ encryption = 'AES' m1 = topology_with_tls.ms['supplier1'] m2 = topology_with_tls.ms['supplier2'] m1.config.set('nsslapd-unhashed-pw-switch', 'on') m2.config.set('nsslapd-unhashed-pw-switch', 'on') test_passw = 'm2Test199' _enable_changelog_encryption(m1, encryption) for inst1, inst2 in ((m1, m2), (m2, m1)): # need to create a user specific to the encryption # else the two runs will hit the same user user_props={ 'uid': 'testuser_%s' % encryption, 'cn' : 'testuser_%s' % encryption, 'sn' : 'user', 'uidNumber' : '1000', 'gidNumber' : '1000', 'homeDirectory' : '/home/testuser_%s' % encryption } user_props["userPassword"] = PASSWORD users = UserAccounts(inst1, DEFAULT_SUFFIX) tuser = users.create(properties=user_props) > _check_unhashed_userpw_encrypted(m1, 'add', tuser.dn, PASSWORD, True) suites/replication/encryption_cl5_test.py:138: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ suites/replication/encryption_cl5_test.py:65: in _check_unhashed_userpw_encrypted dbscanOut = inst.dbscan(DEFAULT_BENAME, 'replication_changelog') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:3015: in dbscan output = subprocess.check_output(cmd) /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ input = None, capture_output = False, timeout = None, check = True popenargs = (['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db'],) kwargs = {'stdout': -1} process = <Popen: returncode: -6 args: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slap...> stdout = b'\ndbid: 0000006f000000000000\n\tentry count: 10\n\ndbid: 000000de000000000000\n\tpurge ruv:\n\t\t{replicageneration}...lugins,cn=config\n\t\tmodifiersName: cn=Multisupplier Replication Plugin,cn=plugins,cn=config\n\t\tcreateTimestamp: 20' stderr = None, retcode = -6 def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs): """Run command with arguments and return a CompletedProcess instance. The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. If check is True and the exit code was non-zero, it raises a CalledProcessError.
The CalledProcessError object will have the return code in the returncode attribute, and output & stderr attributes if those streams were captured. If timeout is given, and the process takes too long, a TimeoutExpired exception will be raised. There is an optional argument "input", allowing you to pass bytes or a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it will be used internally. By default, all communication is in bytes, and therefore any "input" should be bytes, and the stdout and stderr will be bytes. If in text mode, any "input" should be a string, and stdout and stderr will be strings decoded according to locale encoding, or by "encoding" if set. Text mode is triggered by setting any of text, encoding, errors or universal_newlines. The other arguments are the same as for the Popen constructor. """ if input is not None: if kwargs.get('stdin') is not None: raise ValueError('stdin and input arguments may not both be used.') kwargs['stdin'] = PIPE if capture_output: if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None: raise ValueError('stdout and stderr arguments may not be used ' 'with capture_output.') kwargs['stdout'] = PIPE kwargs['stderr'] = PIPE with Popen(*popenargs, **kwargs) as process: try: stdout, stderr = process.communicate(input, timeout=timeout) except TimeoutExpired as exc: process.kill() if _mswindows: # Windows accumulates the output in a single blocking # read() call run on child threads, with the timeout # being done in a join() on those threads. communicate() # _after_ kill() is required to collect that and add it # to the exception. exc.stdout, exc.stderr = process.communicate() else: # POSIX _communicate already populated the output so # far into the TimeoutExpired exception. process.wait() raise except: # Including KeyboardInterrupt, communicate handled that. process.kill() # We don't call process.wait() as .__exit__ does that for us. raise retcode = process.poll() if check and retcode: > raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) E subprocess.CalledProcessError: Command '['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db']' died with <Signals.SIGABRT: 6>. /usr/lib64/python3.9/subprocess.py:528: CalledProcessError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3deac1bb-5f90-46d1-a473-6bae01a59541 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ccb056cd-ae61-424e-903c-14565bf49abb / got description=3deac1bb-5f90-46d1-a473-6bae01a59541) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect f732a264-8ace-48d0-89b0-340bd13a7dfa / got description=ccb056cd-ae61-424e-903c-14565bf49abb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working ------------------------------Captured stderr call------------------------------ free(): double free detected in tcache 2 -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:47 Configuring changelog encryption:supplier1 for: AES INFO tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db'] -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa522383-c979-46c8-a813-01d5469f8334 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4f4b05c5-3c28-4401-b34f-f20ed14dd26c / got description=fa522383-c979-46c8-a813-01d5469f8334) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 25f92ceb-5df2-42d0-abb4-aa70f7373134 / got description=4f4b05c5-3c28-4401-b34f-f20ed14dd26c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working ------------------------------Captured stderr call------------------------------ free(): double free detected in tcache 2 -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:47 Configuring changelog encryption:supplier1 for: AES INFO tests.suites.replication.encryption_cl5_test:encryption_cl5_test.py:64 Running dbscan -f to check unhashed#user#password attr INFO lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-supplier1/db/userRoot/replication_changelog.db'] | |||
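Same root cause as the regression_test failure above: dbscan aborts on a double free while dumping replication_changelog.db, so the encryption assertions never run. A sketch of how the helper could turn the crash into a readable test failure instead of a bare CalledProcessError (a hypothetical wrapper around the inst.dbscan() call shown in the traceback):

    import subprocess
    import pytest

    def dbscan_or_fail(inst, bename, index):
        # Hypothetical guard around lib389's dbscan(); subprocess reports a
        # death by signal N as returncode -N.
        try:
            return inst.dbscan(bename, index)
        except subprocess.CalledProcessError as e:
            pytest.fail('dbscan crashed (returncode %d); changelog contents '
                        'could not be verified' % e.returncode)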
Failed | tickets/ticket47781_test.py::test_ticket47781 | 14.23 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b9082e0> def test_ticket47781(topology_st): """ Testing for a deadlock after doing an online import of an LDIF with replication data. The replication agreement should be invalid. """ log.info('Testing Ticket 47781 - Testing for deadlock after importing LDIF with replication data') supplier = topology_st.standalone repl = ReplicationManager(DEFAULT_SUFFIX) repl.create_first_supplier(supplier) properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} # The agreement should point to a server that does NOT exist (invalid port) repl_agreement = supplier.agreement.create(suffix=DEFAULT_SUFFIX, host=supplier.host, port=5555, properties=properties) # # add two entries # log.info('Adding two entries...') supplier.add_s(Entry(('cn=entry1,dc=example,dc=com', { 'objectclass': 'top person'.split(), 'sn': 'user', 'cn': 'entry1'}))) supplier.add_s(Entry(('cn=entry2,dc=example,dc=com', { 'objectclass': 'top person'.split(), 'sn': 'user', 'cn': 'entry2'}))) # # export the replication ldif # log.info('Exporting replication ldif...') args = {EXPORT_REPL_INFO: True} exportTask = Tasks(supplier) exportTask.exportLDIF(DEFAULT_SUFFIX, None, "/tmp/export.ldif", args) # # Restart the server # log.info('Restarting server...') supplier.stop() supplier.start() # # Import the ldif # log.info('Import replication LDIF file...') importTask = Tasks(supplier) args = {TASK_WAIT: True} > importTask.importLDIF(DEFAULT_SUFFIX, None, "/tmp/export.ldif", args) tickets/ticket47781_test.py:85: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.tasks.Tasks object at 0x7ff99babb160> suffix = 'dc=example,dc=com', benamebase = None, input_file = '/tmp/export.ldif' args = {'wait': True} def importLDIF(self, suffix=None, benamebase=None, input_file=None, args=None): ''' Import from a LDIF format a given 'suffix' (or 'benamebase' that stores that suffix). It uses an internal task to acheive this request. If 'suffix' and 'benamebase' are specified, it uses 'benamebase' first else 'suffix'. If both 'suffix' and 'benamebase' are missing it raise ValueError 'input_file' is the ldif input file @param suffix - suffix of the backend @param benamebase - 'commonname'/'cn' of the backend (e.g. 'userRoot') @param ldif_input - file that will contain the entries in LDIF format to import @param args - is a dictionary that contains modifier of the import task wait: True/[False] - If True, 'export' waits for the completion of the task before to return @return None @raise ValueError ''' if self.conn.state != DIRSRV_STATE_ONLINE: raise ValueError("Invalid Server State %s! Must be online" % self.conn.state) # Checking the parameters if not benamebase and not suffix: raise ValueError("Specify either bename or suffix") if not input_file: raise ValueError("input_file is mandatory") if not os.path.exists(input_file): > raise ValueError("Import file (%s) does not exist" % input_file) E ValueError: Import file (/tmp/export.ldif) does not exist /usr/local/lib/python3.9/site-packages/lib389/tasks.py:499: ValueError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:597 Export task export_06052021_011455 for file /tmp/export.ldif completed successfully | |||
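The export task logs success, yet importLDIF cannot see /tmp/export.ldif. A plausible explanation (an assumption, not confirmed by this run) is systemd's PrivateTmp: ns-slapd writes the export into its own private /tmp, which the test process cannot read. A sketch that keeps the file in a directory both processes share, assuming lib389's get_ldif_dir() helper is available in this build:

    import os
    from lib389.tasks import Tasks
    from lib389._constants import DEFAULT_SUFFIX
    from lib389.properties import EXPORT_REPL_INFO, TASK_WAIT

    # Avoid /tmp, which systemd may virtualize for the server process;
    # 'supplier' is the instance from the test above.
    ldif_file = os.path.join(supplier.get_ldif_dir(), 'export.ldif')
    Tasks(supplier).exportLDIF(DEFAULT_SUFFIX, None, ldif_file, {EXPORT_REPL_INFO: True})
    supplier.restart()
    Tasks(supplier).importLDIF(DEFAULT_SUFFIX, None, ldif_file, {TASK_WAIT: True})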
Failed | tickets/ticket47871_test.py::test_ticket47871_2 | 62.91 | |
topology_m1c1 = <lib389.topologies.TopologyMain object at 0x7ff99c362220> def test_ticket47871_2(topology_m1c1): ''' Wait until only the last entry remains ''' MAX_TRIES = 10 TRY_NO = 1 while TRY_NO <= MAX_TRIES: time.sleep(6) # at least 1 trimming occurred ents = topology_m1c1.ms["supplier1"].search_s(RETROCL_SUFFIX, ldap.SCOPE_ONELEVEL, "(objectclass=*)") assert len(ents) <= MAX_OTHERS topology_m1c1.ms["supplier1"].log.info("\nTry no %d it remains %d entries" % (TRY_NO, len(ents))) for ent in ents: topology_m1c1.ms["supplier1"].log.info("%s" % ent.dn) if len(ents) > 1: TRY_NO += 1 else: break > assert TRY_NO <= MAX_TRIES E assert 11 <= 10 tickets/ticket47871_test.py:100: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket47871_test.py:93 Try no 1 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 2 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 3 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 4 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95
changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 5 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 6 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 7 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 8 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try no 9 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog INFO lib389:ticket47871_test.py:93 Try 
no 10 it remains 10 entries INFO lib389:ticket47871_test.py:95 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:95 changenumber=10,cn=changelog | |||
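The failure above shows retro changelog trimming never firing. A minimal diagnostic sketch (not from the test suite), assuming an already-connected lib389 DirSrv instance named inst; the DN and nsslapd-changelogmaxage are the standard Retro Changelog Plugin settings, and dump_retrocl_trim_config is a hypothetical helper name:

    import ldap

    RETROCL_PLUGIN_DN = 'cn=Retro Changelog Plugin,cn=plugins,cn=config'

    def dump_retrocl_trim_config(inst):
        # Read back the trimming-related attributes so the log shows whether
        # a max age was actually configured before the test started polling.
        ents = inst.search_s(RETROCL_PLUGIN_DN, ldap.SCOPE_BASE, '(objectclass=*)',
                             ['nsslapd-pluginEnabled', 'nsslapd-changelogmaxage'])
        for ent in ents:
            inst.log.info('retrocl trim config: %s' % ent)

Running this before the polling loop would distinguish "trimming configured but slow" from "trim age never set", which is the first question this failure raises.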
Failed | tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_positive | 9.18 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b5dffd0>
log_dir = '/var/log/dirsrv/slapd-standalone1/access'

    def test_ticket47910_logconv_start_end_positive(topology_st, log_dir):
        '''
        Execute logconv.pl with -S (start time) and -E (end time) set to valid
        time stamps. This should execute successfully.
        '''
        #
        # Execute logconv.pl -S -E with random timestamp
        #
        log.info('Running test_ticket47910 - Execute logconv.pl -S -E with random values')

        log.info("taking current time with offset of 2 mins and formatting it to feed -S")
        start_time_stamp = (datetime.now() - timedelta(minutes=2))
        formatted_start_time_stamp = format_time(start_time_stamp)

        log.info("taking current time with offset of 2 mins and formatting it to feed -E")
        end_time_stamp = (datetime.now() + timedelta(minutes=2))
        formatted_end_time_stamp = format_time(end_time_stamp)

        log.info("Executing logconv.pl with -S and -E")
        result = execute_logconv(topology_st.standalone, formatted_start_time_stamp,
                                 formatted_end_time_stamp, log_dir)
>       assert result == 0
E       assert 2 == 0
E         +2
E         -0

tickets/ticket47910_test.py:91: AssertionError
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.utils:ticket47910_test.py:36 Disable access log buffering
INFO lib389.utils:ticket47910_test.py:39 Do a ldapsearch operation
INFO lib389.utils:ticket47910_test.py:42 sleep for some time so that the access log file gets generated
-------------------------------Captured log call--------------------------------
INFO lib389.utils:ticket47910_test.py:79 Running test_ticket47910 - Execute logconv.pl -S -E with random values
INFO lib389.utils:ticket47910_test.py:81 taking current time with offset of 2 mins and formatting it to feed -S
INFO lib389.utils:ticket47910_test.py:85 taking current time with offset of 2 mins and formatting it to feed -E
INFO lib389.utils:ticket47910_test.py:89 Executing logconv.pl with -S and -E
INFO lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time
INFO lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S [05/Jun/2021:01:20:38] -E [05/Jun/2021:01:24:38] /var/log/dirsrv/slapd-standalone1/access
INFO lib389.utils:ticket47910_test.py:66 standard output
INFO lib389.utils:ticket47910_test.py:67 standard errors: Can't locate sigtrap.pm in @INC (you may need to install the sigtrap module) (@INC contains: /usr/local/lib64/perl5/5.32 /usr/local/share/perl5/5.32 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at /usr/bin/logconv.pl line 23. BEGIN failed--compilation aborted at /usr/bin/logconv.pl line 23. | | |
Failed | tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_negative | 0.11 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b5dffd0>
log_dir = '/var/log/dirsrv/slapd-standalone1/access'

    def test_ticket47910_logconv_start_end_negative(topology_st, log_dir):
        '''
        Execute logconv.pl with -S (start time) and -E (end time) where the
        end time is earlier than the start time. This is a negative test case
        and should produce an error message.
        '''
        #
        # Execute logconv.pl -S and -E with random timestamp
        #
        log.info('Running test_ticket47910 - Execute logconv.pl -S -E with starttime>endtime')

        log.info("taking current time with offset of 2 mins and formatting it to feed -S")
        start_time_stamp = (datetime.now() + timedelta(minutes=2))
        formatted_start_time_stamp = format_time(start_time_stamp)

        log.info("taking current time with offset of 2 mins and formatting it to feed -E")
        end_time_stamp = (datetime.now() - timedelta(minutes=2))
        formatted_end_time_stamp = format_time(end_time_stamp)

        log.info("Executing logconv.pl with -S and -E")
        result = execute_logconv(topology_st.standalone, formatted_start_time_stamp,
                                 formatted_end_time_stamp, log_dir)
>       assert result == 1
E       assert 2 == 1
E         +2
E         -1

tickets/ticket47910_test.py:117: AssertionError
-------------------------------Captured log call--------------------------------
INFO lib389.utils:ticket47910_test.py:105 Running test_ticket47910 - Execute logconv.pl -S -E with starttime>endtime
INFO lib389.utils:ticket47910_test.py:107 taking current time with offset of 2 mins and formatting it to feed -S
INFO lib389.utils:ticket47910_test.py:111 taking current time with offset of 2 mins and formatting it to feed -E
INFO lib389.utils:ticket47910_test.py:115 Executing logconv.pl with -S and -E
INFO lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time
INFO lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S [05/Jun/2021:01:24:38] -E [05/Jun/2021:01:20:38] /var/log/dirsrv/slapd-standalone1/access
INFO lib389.utils:ticket47910_test.py:66 standard output
INFO lib389.utils:ticket47910_test.py:67 standard errors: Can't locate sigtrap.pm in @INC (you may need to install the sigtrap module) (@INC contains: /usr/local/lib64/perl5/5.32 /usr/local/share/perl5/5.32 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at /usr/bin/logconv.pl line 23. BEGIN failed--compilation aborted at /usr/bin/logconv.pl line 23. | | |
Failed | tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_invalid | 0.11 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b5dffd0>
log_dir = '/var/log/dirsrv/slapd-standalone1/access'

    def test_ticket47910_logconv_start_end_invalid(topology_st, log_dir):
        '''
        Execute logconv.pl with -S (start time) and -E (end time) set to
        invalid time stamps. This is a negative test case and should produce
        an error message.
        '''
        #
        # Execute logconv.pl -S and -E with invalid timestamp
        #
        log.info('Running test_ticket47910 - Execute logconv.pl -S -E with invalid timestamp')
        log.info("Set start time and end time to invalid values")
        start_time_stamp = "invalid"
        end_time_stamp = "invalid"
        log.info("Executing logconv.pl with -S and -E")
        result = execute_logconv(topology_st.standalone, start_time_stamp, end_time_stamp, log_dir)
>       assert result == 1
E       assert 2 == 1
E         +2
E         -1

tickets/ticket47910_test.py:135: AssertionError
-------------------------------Captured log call--------------------------------
INFO lib389.utils:ticket47910_test.py:128 Running test_ticket47910 - Execute logconv.pl -S -E with invalid timestamp
INFO lib389.utils:ticket47910_test.py:129 Set start time and end time to invalid values
INFO lib389.utils:ticket47910_test.py:133 Executing logconv.pl with -S and -E
INFO lib389.utils:ticket47910_test.py:61 Executing logconv.pl with -S current time and -E end time
INFO lib389.utils:ticket47910_test.py:63 /usr/bin/logconv.pl -S invalid -E invalid /var/log/dirsrv/slapd-standalone1/access
INFO lib389.utils:ticket47910_test.py:66 standard output
INFO lib389.utils:ticket47910_test.py:67 standard errors: Can't locate sigtrap.pm in @INC (you may need to install the sigtrap module) (@INC contains: /usr/local/lib64/perl5/5.32 /usr/local/share/perl5/5.32 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at /usr/bin/logconv.pl line 23. BEGIN failed--compilation aborted at /usr/bin/logconv.pl line 23. | | |
Failed | tickets/ticket47910_test.py::test_ticket47910_logconv_noaccesslogs | 0.98 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b5dffd0>
log_dir = '/var/log/dirsrv/slapd-standalone1/access'

    def test_ticket47910_logconv_noaccesslogs(topology_st, log_dir):
        '''
        Execute logconv.pl with -S (start time) without specifying the access
        log location
        '''
        #
        # Execute logconv.pl -S with random timestamp and no access log location
        #
        log.info('Running test_ticket47910 - Execute logconv.pl without access logs')

        log.info("taking current time with offset of 2 mins and formatting it to feed -S")
        time_stamp = (datetime.now() - timedelta(minutes=2))
        formatted_time_stamp = format_time(time_stamp)
        log.info("Executing logconv.pl with -S current time")
        cmd = [os.path.join(topology_st.standalone.get_bin_dir(), 'logconv.pl'), '-S', formatted_time_stamp]
        log.info(" ".join(cmd))
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        log.info("standard output" + ensure_str(stdout))
        log.info("standard errors" + ensure_str(stderr))

>       assert proc.returncode == 1
E       assert 2 == 1
E         +2
E         -1

tickets/ticket47910_test.py:160: AssertionError
-------------------------------Captured log call--------------------------------
INFO lib389.utils:ticket47910_test.py:147 Running test_ticket47910 - Execute logconv.pl without access logs
INFO lib389.utils:ticket47910_test.py:149 taking current time with offset of 2 mins and formatting it to feed -S
INFO lib389.utils:ticket47910_test.py:152 Executing logconv.pl with -S current time
INFO lib389.utils:ticket47910_test.py:154 /usr/bin/logconv.pl -S [05/Jun/2021:01:20:38]
INFO lib389.utils:ticket47910_test.py:157 standard output
INFO lib389.utils:ticket47910_test.py:158 standard errors: Can't locate sigtrap.pm in @INC (you may need to install the sigtrap module) (@INC contains: /usr/local/lib64/perl5/5.32 /usr/local/share/perl5/5.32 /usr/lib64/perl5/vendor_perl /usr/share/perl5/vendor_perl /usr/lib64/perl5 /usr/share/perl5) at /usr/bin/logconv.pl line 23. BEGIN failed--compilation aborted at /usr/bin/logconv.pl line 23. | | |
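All four logconv.pl failures above share one root cause: the script dies at compile time because the host's Perl cannot locate sigtrap.pm, so every invocation exits 2 no matter which arguments are under test. A minimal precondition sketch, assuming pytest and a perl binary on PATH (have_perl_sigtrap is a hypothetical helper); it would report the missing module as a skip instead of four assertion failures:

    import subprocess

    import pytest

    def have_perl_sigtrap():
        # 'perl -Msigtrap -e 1' exits 0 only if the sigtrap module can be loaded.
        try:
            return subprocess.run(['perl', '-Msigtrap', '-e', '1'],
                                  capture_output=True).returncode == 0
        except FileNotFoundError:
            return False

    # Module-level mark: skip the whole logconv test module when the
    # dependency is missing instead of failing on logconv.pl's exit code.
    pytestmark = pytest.mark.skipif(not have_perl_sigtrap(),
                                    reason='logconv.pl requires the Perl sigtrap module')

On Fedora the core Perl modules are split into subpackages, so a minimal perl install can lack sigtrap.pm even though logconv.pl itself is present.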
Failed | tickets/ticket47931_test.py::test_ticket47931 | 9.91 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6ca400>

    def test_ticket47931(topology_st):
        """Test Retro Changelog and MemberOf deadlock fix.

        Verification steps:
        - Enable retro cl and memberOf.
        - Create two backends: A & B.
        - Configure retrocl scoping for backend A.
        - Configure memberOf plugin for uniquemember.
        - Create group in backend A.
        - In parallel, add members to the group on A, and make modifications
          to entries in backend B.
        - Make sure the server does not hang during the updates to both
          backends.
        """

        # Enable dynamic plugins to make plugin configuration easier
        try:
            topology_st.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')])
        except ldap.LDAPError as e:
            log.error('Failed to enable dynamic plugins! ' + e.args[0]['desc'])
            assert False

        # Enable the plugins
        topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
        topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG)

        # Create second backend
        topology_st.standalone.backend.create(SECOND_SUFFIX, {BACKEND_NAME: SECOND_BACKEND})
        topology_st.standalone.mappingtree.create(SECOND_SUFFIX, bename=SECOND_BACKEND)

        # Create the root node of the second backend
        try:
            topology_st.standalone.add_s(Entry((SECOND_SUFFIX, {'objectclass': 'top domain'.split(), 'dc': 'deadlock'})))
        except ldap.LDAPError as e:
            log.fatal('Failed to create suffix entry: error ' + e.args[0]['desc'])
            assert False

        # Configure retrocl scope
        try:
            topology_st.standalone.modify_s(RETROCL_PLUGIN_DN, [(ldap.MOD_REPLACE, 'nsslapd-include-suffix', ensure_bytes(DEFAULT_SUFFIX))])
        except ldap.LDAPError as e:
            log.error('Failed to configure retrocl plugin: ' + e.args[0]['desc'])
            assert False

        # Configure memberOf group attribute
        try:
>           topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofgroupattr', b'uniquemember')])

tickets/ticket47931_test.py:107:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

[The modify then descends through lib389's deprecation wrapper (inner in /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173, which warns on raw ldap calls unless escapehatch='i am sure' is passed and then forwards with "return f(*args, **kwargs)") and python-ldap. The raw report re-displays the full inner source at every one of these frames; the verbatim repeats are elided here.]

/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: in modify_s
    return self.modify_ext_s(dn,modlist,None,None)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3
    resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call
    reraise(exc_type, exc_value, exc_traceback)
/usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise
    raise exc_value
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: in _ldap_call
>           result = func(*args,**kwargs)
E           ldap.UNWILLING_TO_PERFORM: {'msgtype': 103, 'msgid': 17, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}

During handling of the above exception, another exception occurred:

    [test_ticket47931 source as shown above, continuing past the failing modify]

        except ldap.LDAPError as e:
            log.fatal('Failed to configure memberOf plugin: error ' + e.args[0]['desc'])
>           assert False
E           assert False

tickets/ticket47931_test.py:113: AssertionError
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO lib389:backend.py:80 List backend with suffix=dc=deadlock
INFO lib389:backend.py:290 Creating a local backend
INFO lib389:backend.py:76 List backend cn=deadlock,cn=ldbm database,cn=plugins,cn=config
INFO lib389:__init__.py:1710 Found entry dn: cn=deadlock,cn=ldbm database,cn=plugins,cn=config
cn: deadlock
nsslapd-cachememsize: 512000
nsslapd-cachesize: -1
nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/deadlock
nsslapd-dncachememsize: 16777216
nsslapd-readonly: off
nsslapd-require-index: off
nsslapd-require-internalop-index: off
nsslapd-suffix: dc=deadlock
objectClass: top
objectClass: extensibleObject
objectClass: nsBackendInstance
INFO lib389:mappingTree.py:153 Entry dn: cn="dc=deadlock",cn=mapping tree,cn=config
cn: dc=deadlock
nsslapd-backend: deadlock
nsslapd-state: backend
objectclass: top
objectclass: extensibleObject
objectclass: nsMappingTree
INFO lib389:__init__.py:1710 Found entry dn: cn=dc\3Ddeadlock,cn=mapping tree,cn=config
cn: dc=deadlock
nsslapd-backend: deadlock
nsslapd-state: backend
objectClass: top
objectClass: extensibleObject
objectClass: nsMappingTree
CRITICAL tests.tickets.ticket47931_test:ticket47931_test.py:112 Failed to configure memberOf plugin: error Server is unwilling to perform | | |
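err=53 (UNWILLING_TO_PERFORM) on the memberofgroupattr modify means the server refused an online plugin-configuration change, which typically indicates that nsslapd-dynamic-plugins was not actually in effect when the modify arrived. A hedged diagnostic sketch, assuming a connected lib389 DirSrv named inst and lib389's Entry.getValue accessor (dynamic_plugins_enabled is a hypothetical helper):

    import ldap

    def dynamic_plugins_enabled(inst):
        # Read cn=config back instead of trusting the earlier modify: if the
        # flag is not 'on', plugin configuration changes need a restart.
        ent = inst.search_s('cn=config', ldap.SCOPE_BASE, '(objectclass=*)',
                            ['nsslapd-dynamic-plugins'])[0]
        return ent.getValue('nsslapd-dynamic-plugins') == b'on'

Checking this right before the failing modify_s would separate "dynamic plugins silently off" from a genuine plugin bug.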
Failed | tickets/ticket47988_test.py::test_ticket47988_init | 31.35 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850>

    def test_ticket47988_init(topology_m2):
        """
        It adds
        - an objectclass with MAY 'member'
        - an entry ('bind_entry') with which we bind to test the 'SELFDN' operation
        It deletes the anonymous aci
        """
        _header(topology_m2, 'test_ticket47988_init')

        # enable acl error logging
        mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', ensure_bytes(str(8192)))]  # REPL
        topology_m2.ms["supplier1"].modify_s(DN_CONFIG, mod)
        topology_m2.ms["supplier2"].modify_s(DN_CONFIG, mod)

        mod = [(ldap.MOD_REPLACE, 'nsslapd-accesslog-level', ensure_bytes(str(260)))]  # Internal op
        topology_m2.ms["supplier1"].modify_s(DN_CONFIG, mod)
        topology_m2.ms["supplier2"].modify_s(DN_CONFIG, mod)

        # add dummy entries
        for cpt in range(MAX_OTHERS):
            name = "%s%d" % (OTHER_NAME, cpt)
            topology_m2.ms["supplier1"].add_s(Entry(("cn=%s,%s" % (name, SUFFIX), {
                'objectclass': "top person".split(),
                'sn': name,
                'cn': name})))

        # check that entry 0 has been replicated before continuing
        loop = 0
        entryDN = "cn=%s0,%s" % (OTHER_NAME, SUFFIX)
        while loop <= 10:
            try:
                ent = topology_m2.ms["supplier2"].getEntry(entryDN, ldap.SCOPE_BASE, "(objectclass=*)", ['telephonenumber'])
                break
            except ldap.NO_SUCH_OBJECT:
                time.sleep(1)
                loop += 1
        assert (loop <= 10)

        topology_m2.ms["supplier1"].stop(timeout=10)
        topology_m2.ms["supplier2"].stop(timeout=10)

        # install the specific schema M1: ipa3.3, M2: ipa4.1
        schema_file = os.path.join(topology_m2.ms["supplier1"].getDir(__file__, DATA_DIR), "ticket47988/schema_ipa3.3.tar.gz")
        _install_schema(topology_m2.ms["supplier1"], schema_file)
        schema_file = os.path.join(topology_m2.ms["supplier1"].getDir(__file__, DATA_DIR), "ticket47988/schema_ipa4.1.tar.gz")
        _install_schema(topology_m2.ms["supplier2"], schema_file)

>       topology_m2.ms["supplier1"].start(timeout=10)

/export/tests/tickets/ticket47988_test.py:157:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:1075: in start
    subprocess.check_output(["systemctl", "start", "dirsrv@%s" % self.serverid], stderr=subprocess.STDOUT)
/usr/lib64/python3.9/subprocess.py:424: in check_output
    return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

process = <Popen: returncode: 1 args: ['systemctl', 'start', 'dirsrv@supplier1']>
stdout = b'Job for dirsrv@supplier1.service failed because the control process exited with error code.\nSee "systemctl status dirsrv@supplier1.service" and "journalctl -xeu dirsrv@supplier1.service" for details.\n'
stderr = None, retcode = 1

[The stdlib source of subprocess.run() is elided here; with check=True it raises CalledProcessError when the command exits non-zero.]

        if check and retcode:
>           raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr)
E           subprocess.CalledProcessError: Command '['systemctl', 'start', 'dirsrv@supplier1']' returned non-zero exit status 1.

/usr/lib64/python3.9/subprocess.py:528: CalledProcessError
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:142 Creating replication topology.
INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4ea6dece-d7fa-42bf-86ca-45301107a2b5 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 33c195b4-57db-4790-831d-06a522ce57e1 / got description=4ea6dece-d7fa-42bf-86ca-45301107a2b5)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO lib389:ticket47988_test.py:64 ###############################################
INFO lib389:ticket47988_test.py:65 #######
INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_init
INFO lib389:ticket47988_test.py:67 #######
INFO lib389:ticket47988_test.py:68 ###################################################
INFO lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-supplier1/schema/02common.ldif
INFO lib389:ticket47988_test.py:98 replace /etc/dirsrv/slapd-supplier1/schema/99user.ldif
[... the remaining schema LDIFs from the ipa3.3 set are then added for supplier1 with the same "add" record, in the order logged: 50ns-admin, 60nss-ldap, 60autofs, 50ns-web, 60samba, 10dna-plugin, 05rfc4523, 60basev2, 10automember-plugin, 05rfc2927, 10mep-plugin, 60ipadns, 10rfc2307, 50ns-mail, 05rfc4524, 60trust, 60ipaconfig, 50ns-directory, 60eduperson, 60mozilla, 65ipasudo, 60rfc3712, 60rfc2739, 50ns-value, 60acctpolicy, 01core389, 60sabayon, 60pam-plugin, 00core, 25java-object, 60sudo, 70ipaotp, 60pureftpd, 61kerberos-ipav3, 60kerberos, 60basev3, 06inetorgperson, 30ns-common, 28pilot, 20subscriber, 50ns-certificate, 60posix-winsync-plugin ...]
INFO lib389:ticket47988_test.py:102 add /etc/dirsrv/slapd-supplier2/schema/02common.ldif
INFO lib389:ticket47988_test.py:98 replace /etc/dirsrv/slapd-supplier2/schema/99user.ldif
[... the same file sequence is then installed under /etc/dirsrv/slapd-supplier2/schema/ for supplier2 (ipa4.1 set), ending with 60posix-winsync-plugin.ldif ...] | | |
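systemctl only reports that the control process exited with an error; when dirsrv@supplier1 refuses to start right after foreign schema LDIFs were copied into the instance, the instance's own error log usually names the schema definition that failed to parse. A minimal triage sketch, assuming the default log layout /var/log/dirsrv/slapd-<serverid>/errors (tail_dirsrv_errors is a hypothetical helper):

    def tail_dirsrv_errors(serverid, n=40):
        # Schema parsing problems are reported in the instance error log
        # during server startup; print the last n lines.
        path = '/var/log/dirsrv/slapd-%s/errors' % serverid
        with open(path) as f:
            for line in f.readlines()[-n:]:
                print(line, end='')

    tail_dirsrv_errors('supplier1')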
Failed | tickets/ticket47988_test.py::test_ticket47988_1 | 0.07 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850>

    def test_ticket47988_1(topology_m2):
        '''
        Check that replication is working and pause replication M2->M1
        '''
        _header(topology_m2, 'test_ticket47988_1')

        topology_m2.ms["supplier1"].log.debug("\n\nCheck that replication is working and pause replication M2->M1\n")
>       _do_update_entry(supplier=topology_m2.ms["supplier2"], consumer=topology_m2.ms["supplier1"], attempts=5)

/export/tests/tickets/ticket47988_test.py:234:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket47988_test.py:184: in _do_update_entry
    supplier.modify_s(entryDN, mod)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: in modify_s
    return self.modify_ext_s(dn,modlist,None,None)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3
    resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call
    reraise(exc_type, exc_value, exc_traceback)
/usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise
    raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

[python-ldap's _ldap_call source elided; it serializes the call into the OpenLDAP libs and re-raises any LDAPError.]

>           result = func(*args,**kwargs)
E           ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO lib389:ticket47988_test.py:64 ###############################################
INFO lib389:ticket47988_test.py:65 #######
INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_1
INFO lib389:ticket47988_test.py:67 #######
INFO lib389:ticket47988_test.py:68 ################################################### | | |
Failed | tickets/ticket47988_test.py::test_ticket47988_2 | 0.08 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850>

    def test_ticket47988_2(topology_m2):
        '''
        Update M1 schema and trigger update M1->M2
        So M1 should learn new/extended definitions that are in M2 schema
        '''
        _header(topology_m2, 'test_ticket47988_2')

        topology_m2.ms["supplier1"].log.debug("\n\nUpdate M1 schema and an entry on M1\n")
>       supplier1_schema_csn = topology_m2.ms["supplier1"].schema.get_schema_csn()

/export/tests/tickets/ticket47988_test.py:246:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/local/lib/python3.9/site-packages/lib389/schema.py:614: in get_schema_csn
    ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE,
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:870: in search_s
    return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:864: in search_ext_s
    return self.result(msgid,all=1,timeout=timeout)[1]
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:141: in inner
    objtype, data = f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:756: in result
    resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:760: in result2
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3
    resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4(
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call
    reraise(exc_type, exc_value, exc_traceback)
/usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise
    raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

[python-ldap's _ldap_call source elided, as in test_ticket47988_1 above.]

>           result = func(*args,**kwargs)
E           ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO lib389:ticket47988_test.py:64 ###############################################
INFO lib389:ticket47988_test.py:65 #######
INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_2
INFO lib389:ticket47988_test.py:67 #######
INFO lib389:ticket47988_test.py:68 ################################################### | | |
Failed | tickets/ticket47988_test.py::test_ticket47988_3 | 0.08 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850>

    def test_ticket47988_3(topology_m2):
        '''
        Resume replication M2->M1 and check replication is still working
        '''
        _header(topology_m2, 'test_ticket47988_3')

>       _resume_M2_to_M1(topology_m2)

/export/tests/tickets/ticket47988_test.py:283:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/export/tests/tickets/ticket47988_test.py:222: in _resume_M2_to_M1
    ents = topology_m2.ms["supplier2"].agreement.list(suffix=SUFFIX)
/usr/local/lib/python3.9/site-packages/lib389/agreement.py:904: in list
    replica_entries = self.conn.replica.list(suffix)
/usr/local/lib/python3.9/site-packages/lib389/replica.py:178: in list
    ents = self.conn.search_s(base, ldap.SCOPE_SUBTREE, filtr)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:870: in search_s
    return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:863: in search_ext_s
    msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit)
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:853: in search_ext
    return self._ldap_call(
/usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner
    return f(*args, **kwargs)
/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call
    reraise(exc_type, exc_value, exc_traceback)
/usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise
    raise exc_value
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

[python-ldap's _ldap_call source elided, as in test_ticket47988_1 above; here the failing call is search_ext against cn=mapping tree,cn=config with filter (&(objectclass=nsds5Replica)(nsDS5ReplicaRoot=dc=example,dc=com)).]

>           result = func(*args,**kwargs)
E           ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []}

/usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN
-------------------------------Captured log call--------------------------------
INFO lib389:ticket47988_test.py:64 ###############################################
INFO lib389:ticket47988_test.py:65 #######
INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_3
INFO lib389:ticket47988_test.py:67 #######
INFO lib389:ticket47988_test.py:68 ###################################################
INFO lib389:ticket47988_test.py:221 ######################### resume RA M2->M1 ###################### | | |
Failed | tickets/ticket47988_test.py::test_ticket47988_4 | 0.07 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850> def test_ticket47988_4(topology_m2): ''' Check schemaCSN is identical on both server And save the nsschemaCSN to later check they do not change unexpectedly ''' _header(topology_m2, 'test_ticket47988_4') > supplier1_schema_csn = topology_m2.ms["supplier1"].schema.get_schema_csn() /export/tests/tickets/ticket47988_test.py:295: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/schema.py:614: in get_schema_csn ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE, /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:870: in search_s return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d8f160> func = <built-in method search_ext of LDAP object at 0x7ff99b6add50> args = ('cn=schema', 0, 'objectclass=*', ['nsSchemaCSN'], 0, None, ...) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- INFO lib389:ticket47988_test.py:64 ############################################### INFO lib389:ticket47988_test.py:65 ####### INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_4 INFO lib389:ticket47988_test.py:67 ####### INFO lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_5 | 0.07 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850> def test_ticket47988_5(topology_m2): ''' Check schemaCSN do not change unexpectedly ''' _header(topology_m2, 'test_ticket47988_5') > _do_update_entry(supplier=topology_m2.ms["supplier1"], consumer=topology_m2.ms["supplier2"], attempts=5) /export/tests/tickets/ticket47988_test.py:313: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket47988_test.py:184: in _do_update_entry supplier.modify_s(entryDN, mod) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:612: in modify_ext_s msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:609: in modify_ext return self._ldap_call(self._l.modify_ext,dn,modlist,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls)) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d8f160> func = <built-in method modify_ext of LDAP object at 0x7ff99b6add50> args = ('cn=other_entry0,dc=example,dc=com', [(2, 'telephonenumber', b'170')], None, None) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- INFO lib389:ticket47988_test.py:64 ############################################### INFO lib389:ticket47988_test.py:65 ####### INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_5 INFO lib389:ticket47988_test.py:67 ####### INFO lib389:ticket47988_test.py:68 ################################################### | |||
Failed | tickets/ticket47988_test.py::test_ticket47988_6 | 0.10 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99b86b850> def test_ticket47988_6(topology_m2): ''' Update M1 schema and trigger update M2->M1 So M2 should learn new/extended definitions that are in M1 schema ''' _header(topology_m2, 'test_ticket47988_6') topology_m2.ms["supplier1"].log.debug("\n\nUpdate M1 schema and an entry on M1\n") > supplier1_schema_csn = topology_m2.ms["supplier1"].schema.get_schema_csn() /export/tests/tickets/ticket47988_test.py:336: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/schema.py:614: in get_schema_csn ents = self.conn.search_s(DN_SCHEMA, ldap.SCOPE_BASE, /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:870: in search_s return self.search_ext_s(base,scope,filterstr,attrlist,attrsonly,None,None,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d8f160> func = <built-in method search_ext of LDAP object at 0x7ff99b6add50> args = ('cn=schema', 0, 'objectclass=*', ['nsSchemaCSN'], 0, None, ...) kwargs = {}, diagnostic_message_success = None, exc_type = None exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log call-------------------------------- INFO lib389:ticket47988_test.py:64 ############################################### INFO lib389:ticket47988_test.py:65 ####### INFO lib389:ticket47988_test.py:66 ####### test_ticket47988_6 INFO lib389:ticket47988_test.py:67 ####### INFO lib389:ticket47988_test.py:68 ################################################### | |||
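Analysis: test_ticket47988_3 through test_ticket47988_6 all fail with the same ldap.SERVER_DOWN before reaching their assertions, so this is one instance outage (most likely left over from the earlier steps of this module) cascading through four tests, not four independent regressions. A minimal guard could make that explicit in the report; this is a sketch assuming lib389's DirSrv.status() and serverid behave as documented (require_online is a hypothetical helper, not part of the suite):

import pytest

def require_online(inst):
    # Assumption: DirSrv.status() reports whether the instance process is running.
    if not inst.status():
        pytest.fail("instance %s is down; see the first failure in this module" % inst.serverid)

Calling require_online(topology_m2.ms["supplier2"]) at the top of each dependent test would turn this cascade into one clear fast-fail per test instead of four identical SERVER_DOWN tracebacks.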
Failed | tickets/ticket48005_test.py::test_ticket48005_setup | 12.52 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99ba2a0a0> def test_ticket48005_setup(topology_st): ''' allow dump core generate a test ldif file using dbgen.pl import the ldif ''' log.info("Ticket 48005 setup...") if hasattr(topology_st.standalone, 'prefix'): prefix = topology_st.standalone.prefix else: prefix = None sysconfig_dirsrv = os.path.join(topology_st.standalone.get_initconfig_dir(), 'dirsrv') cmdline = 'egrep "ulimit -c unlimited" %s' % sysconfig_dirsrv p = os.popen(cmdline, "r") ulimitc = p.readline() if ulimitc == "": log.info('No ulimit -c in %s' % sysconfig_dirsrv) log.info('Adding it') cmdline = 'echo "ulimit -c unlimited" >> %s' % sysconfig_dirsrv sysconfig_dirsrv_systemd = sysconfig_dirsrv + ".systemd" cmdline = 'egrep LimitCORE=infinity %s' % sysconfig_dirsrv_systemd p = os.popen(cmdline, "r") lcore = p.readline() if lcore == "": log.info('No LimitCORE in %s' % sysconfig_dirsrv_systemd) log.info('Adding it') cmdline = 'echo LimitCORE=infinity >> %s' % sysconfig_dirsrv_systemd topology_st.standalone.restart(timeout=10) ldif_file = topology_st.standalone.get_ldif_dir() + "/ticket48005.ldif" os.system('ls %s' % ldif_file) os.system('rm -f %s' % ldif_file) if hasattr(topology_st.standalone, 'prefix'): prefix = topology_st.standalone.prefix else: prefix = "" dbgen_prog = prefix + '/bin/dbgen.pl' log.info('dbgen_prog: %s' % dbgen_prog) os.system('%s -s %s -o %s -u -n 10000' % (dbgen_prog, SUFFIX, ldif_file)) cmdline = 'egrep dn: %s | wc -l' % ldif_file p = os.popen(cmdline, "r") dnnumstr = p.readline() num = int(dnnumstr) log.info("We have %d entries.\n", num) importTask = Tasks(topology_st.standalone) args = {TASK_WAIT: True} > importTask.importLDIF(SUFFIX, None, ldif_file, args) /export/tests/tickets/ticket48005_test.py:74: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.tasks.Tasks object at 0x7ff99b6de310> suffix = 'dc=example,dc=com', benamebase = None input_file = '/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif' args = {'wait': True} def importLDIF(self, suffix=None, benamebase=None, input_file=None, args=None): ''' Import from a LDIF format a given 'suffix' (or 'benamebase' that stores that suffix). It uses an internal task to acheive this request. If 'suffix' and 'benamebase' are specified, it uses 'benamebase' first else 'suffix'. If both 'suffix' and 'benamebase' are missing it raise ValueError 'input_file' is the ldif input file @param suffix - suffix of the backend @param benamebase - 'commonname'/'cn' of the backend (e.g. 'userRoot') @param ldif_input - file that will contain the entries in LDIF format to import @param args - is a dictionary that contains modifier of the import task wait: True/[False] - If True, 'export' waits for the completion of the task before to return @return None @raise ValueError ''' if self.conn.state != DIRSRV_STATE_ONLINE: raise ValueError("Invalid Server State %s! 
Must be online" % self.conn.state) # Checking the parameters if not benamebase and not suffix: raise ValueError("Specify either bename or suffix") if not input_file: raise ValueError("input_file is mandatory") if not os.path.exists(input_file): > raise ValueError("Import file (%s) does not exist" % input_file) E ValueError: Import file (/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif) does not exist /usr/local/lib/python3.9/site-packages/lib389/tasks.py:499: ValueError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ grep: /etc/sysconfig/dirsrv: No such file or directory grep: /etc/sysconfig/dirsrv.systemd: No such file or directory ls: cannot access '/var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif': No such file or directory sh: line 1: /bin/dbgen.pl: No such file or directory grep: /var/lib/dirsrv/slapd-standalone1/ldif/ticket48005.ldif: No such file or directory -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48005_test:ticket48005_test.py:31 Ticket 48005 setup... INFO tests.tickets.ticket48005_test:ticket48005_test.py:41 No ulimit -c in /etc/sysconfig/dirsrv INFO tests.tickets.ticket48005_test:ticket48005_test.py:42 Adding it INFO tests.tickets.ticket48005_test:ticket48005_test.py:50 No LimitCORE in /etc/sysconfig/dirsrv.systemd INFO tests.tickets.ticket48005_test:ticket48005_test.py:51 Adding it INFO tests.tickets.ticket48005_test:ticket48005_test.py:64 dbgen_prog: /bin/dbgen.pl INFO tests.tickets.ticket48005_test:ticket48005_test.py:70 We have 0 entries. | |||
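Analysis: the captured stderr shows the real cause: sh: line 1: /bin/dbgen.pl: No such file or directory. The LDIF is never generated, os.system's exit status is ignored, and the failure only surfaces when importLDIF finds no input file. A fail-fast check is sketched below, reusing the test's own prefix/dbgen_prog construction (newer lib389 versions also appear to ship a Python generator in lib389.dbgen, which would avoid the Perl dependency entirely):

import os
import pytest

dbgen_prog = (prefix or '') + '/bin/dbgen.pl'  # same construction as the test
# Skip with a precise reason instead of failing later on a missing LDIF.
if not os.access(dbgen_prog, os.X_OK):
    pytest.skip("LDIF generator not installed at %s" % dbgen_prog)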
Failed | tickets/ticket48013_test.py::test_ticket48013 | 9.80 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999a08bb0> def test_ticket48013(topology_st): ''' Content Synchonization: Test that invalid cookies are caught ''' cookies = ('#', '##', 'a#a#a', 'a#a#1') # Enable dynamic plugins try: topology_st.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', b'on')]) except ldap.LDAPError as e: log.error('Failed to enable dynamic plugin! {}'.format(e.args[0]['desc'])) assert False # Enable retro changelog topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) # Enbale content sync plugin > topology_st.standalone.plugins.enable(name=PLUGIN_REPL_SYNC) /export/tests/tickets/ticket48013_test.py:61: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/plugins.py:2105: in enable plugin.enable() /usr/local/lib/python3.9/site-packages/lib389/plugins.py:58: in enable self.set('nsslapd-pluginEnabled', 'on') /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:447: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d4f9d0> func = <built-in method result4 of LDAP object at 0x7ff99c5fe360> args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
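Analysis: the connection dies on the modify that enables the content sync plugin, right after the retro changelog plugin was enabled with nsslapd-dynamic-plugins on. SERVER_DOWN on a plugin-enable modify usually means the server process itself went away, so the instance error log and any core dump are the artifacts to collect. A sketch, assuming the module-level log and the errlog attribute that the ticket48194 tests in this report already use:

import subprocess

inst = topology_st.standalone
if not inst.status():
    # Grab the tail of the error log to capture the crash context.
    tail = subprocess.run(['tail', '-n', '50', inst.errlog],
                          capture_output=True, text=True)
    log.error("server down after plugin enable:\n%s", tail.stdout)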
Failed | tickets/ticket48194_test.py::test_run_1 | 6.93 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> def test_run_1(topology_st): """ Check nsSSL3Ciphers: +all All ciphers are enabled except null. Note: default allowWeakCipher (i.e., off) for +all """ _header(topology_st, 'Test Case 2 - Check the ciphers availability for "+all" with default allowWeakCiphers') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', b'64')]) # Make sure allowWeakCipher is not set. topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_DELETE, 'allowWeakCipher', None)]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_0' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:158: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." """ log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 2 - Check the ciphers availability for "+all" with default allowWeakCiphers INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:151 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_2 | 6.47 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> def test_run_2(topology_st): """ Check nsSSL3Ciphers: +rsa_aes_128_sha,+rsa_aes_256_sha rsa_aes_128_sha, tls_rsa_aes_128_sha, rsa_aes_256_sha, tls_rsa_aes_256_sha are enabled. default allowWeakCipher """ _header(topology_st, 'Test Case 3 - Check the ciphers availability for "+rsa_aes_128_sha,+rsa_aes_256_sha" with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'+rsa_aes_128_sha,+rsa_aes_256_sha')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_1' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) connectWithOpenssl(topology_st, 'AES256-SHA256', False) > connectWithOpenssl(topology_st, 'AES128-SHA', True) /export/tests/tickets/ticket48194_test.py:184: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> cipher = 'AES128-SHA', expect = True def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." """ log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: > assert False E assert False /export/tests/tickets/ticket48194_test.py:108: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 3 - Check the ciphers availability for "+rsa_aes_128_sha,+rsa_aes_256_sha" with default allowWeakCipher INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:175 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO lib389.utils:ticket48194_test.py:86 Testing AES128-SHA -- expect to handshake 
successfully INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES128-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' | |||
Failed | tickets/ticket48194_test.py::test_run_4 | 6.77 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> def test_run_4(topology_st): """ Check no nsSSL3Ciphers Default ciphers are enabled. default allowWeakCipher """ _header(topology_st, 'Test Case 5 - Check no nsSSL3Ciphers (-all) with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_DELETE, 'nsSSL3Ciphers', b'-all')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_3' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:228: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." """ log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 5 - Check no nsSSL3Ciphers (-all) with default allowWeakCipher INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:221 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_5 | 6.85 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> def test_run_5(topology_st): """ Check nsSSL3Ciphers: default Default ciphers are enabled. default allowWeakCipher """ _header(topology_st, 'Test Case 6 - Check default nsSSL3Ciphers (default setting) with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'default')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_4' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:250: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." """ log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 6 - Check default nsSSL3Ciphers (default setting) with default allowWeakCipher INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:243 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_6 | 6.71 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> def test_run_6(topology_st): """ Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 All ciphers are disabled. default allowWeakCipher """ _header(topology_st, 'Test Case 7 - Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 with default allowWeakCipher') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'+all,-TLS_RSA_WITH_AES_256_CBC_SHA256')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_5' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:274: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." """ log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 7 - Check nsSSL3Ciphers: +all,-TLS_RSA_WITH_AES_256_CBC_SHA256 with default allowWeakCipher INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:267 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Failed | tickets/ticket48194_test.py::test_run_8 | 6.87 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> def test_run_8(topology_st): """ Check nsSSL3Ciphers: default + allowWeakCipher: off Strong Default ciphers are enabled. """ _header(topology_st, 'Test Case 9 - Check default nsSSL3Ciphers (default setting + allowWeakCipher: off)') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(ENCRYPTION_DN, [(ldap.MOD_REPLACE, 'nsSSL3Ciphers', b'default'), (ldap.MOD_REPLACE, 'allowWeakCipher', b'off')]) log.info("\n######################### Restarting the server ######################\n") topology_st.standalone.stop(timeout=10) os.system('mv %s %s.48194_7' % (topology_st.standalone.errlog, topology_st.standalone.errlog)) os.system('touch %s' % (topology_st.standalone.errlog)) time.sleep(2) topology_st.standalone.start(timeout=120) > connectWithOpenssl(topology_st, 'DES-CBC3-SHA', False) /export/tests/tickets/ticket48194_test.py:297: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6145e0> cipher = 'DES-CBC3-SHA', expect = False def connectWithOpenssl(topology_st, cipher, expect): """ Connect with the given cipher Condition: If expect is True, the handshake should be successful. If expect is False, the handshake should be refused with access log: "Cannot communicate securely with peer: no common encryption algorithm(s)." """ log.info("Testing %s -- expect to handshake %s", cipher, "successfully" if expect else "failed") myurl = 'localhost:%s' % LDAPSPORT cmdline = ['/usr/bin/openssl', 's_client', '-connect', myurl, '-cipher', cipher] strcmdline = " ".join(cmdline) log.info("Running cmdline: %s", strcmdline) try: proc = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) except ValueError: log.info("%s failed: %s", cmdline, ValueError) proc.kill() while True: l = proc.stdout.readline() if l == b"": break if b'Cipher is' in l: log.info("Found: %s", l) if expect: if b'(NONE)' in l: assert False else: proc.stdin.close() assert True else: if b'(NONE)' in l: assert True else: proc.stdin.close() > assert False E assert False /export/tests/tickets/ticket48194_test.py:117: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 9 - Check default nsSSL3Ciphers (default setting + allowWeakCipher: off) INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:290 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake failed INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
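Analysis: every unexpected handshake in the ticket48194 group reports Cipher is TLS_AES_128_GCM_SHA256, a TLS 1.3 suite. With OpenSSL 1.1.1 (as shipped on this Fedora 34 host), s_client's -cipher option only constrains the TLS 1.2-and-below cipher list; TLS 1.3 suites are governed separately by -ciphersuites, so restricting -cipher to DES-CBC3-SHA no longer guarantees a refused handshake. The probe in connectWithOpenssl needs to pin the protocol or clear the TLS 1.3 list; a sketch using the test's own myurl and cipher variables:

# Keep -cipher authoritative by excluding TLS 1.3 from the probe;
# alternatively pass -ciphersuites '' to empty the TLS 1.3 suite list.
cmdline = ['/usr/bin/openssl', 's_client',
           '-connect', myurl,
           '-tls1_2',
           '-cipher', cipher]

test_run_2's AES128-SHA probe failing outright ((NONE) even though rsa_aes_128_sha is enabled) looks like a separate issue, possibly the system-wide crypto policy rejecting RSA key exchange, and is not explained by the TLS 1.3 bypass alone.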
Failed | tickets/ticket48228_test.py::test_ticket48228_test_global_policy | 10.16 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999be3a00> user = 'uid=user1,dc=example,dc=com', passwd = 'password', times = 6 def update_passwd(topology_st, user, passwd, times): # Set the default value cpw = passwd for i in range(times): log.info(" Bind as {%s,%s}" % (user, cpw)) topology_st.standalone.simple_bind_s(user, cpw) # Now update the value for this iter. cpw = 'password%d' % i try: > topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())]) /export/tests/tickets/ticket48228_test.py:136: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user1,dc=example,dc=com', [(2, 'userpassword', b'password0')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7ff999cdbc40, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 173,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x5628dc4cc7e0, file '/export/tests/tickets/ticket48228_test.py', line 141, code update_pass...t=[" topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999be3310> dn = 'uid=user1,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user1,dc=example,dc=com', [(2, 'userpassword', b'password0')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999be3310> dn = 'uid=user1,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (10,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999be3310>, msgid = 10, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (10, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999be3310>, msgid = 10, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff99b6f1600>, 10, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999be3310> func = <built-in method result4 of LDAP object at 0x7ff99b6f1600> args = (10, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 10, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user1,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7ff99c65ae80> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999be3310> func = <built-in method result4 of LDAP object at 0x7ff99b6f1600> args = (10, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 10, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user1,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7ff999be3a00> def test_ticket48228_test_global_policy(topology_st): """ Check global password policy """ log.info(' Set inhistory = 6') set_global_pwpolicy(topology_st, 6) log.info(' Bind as directory manager') log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) log.info(' Add an entry' + USER1_DN) try: topology_st.standalone.add_s( Entry((USER1_DN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(), 'sn': '1', 'cn': 'user 1', 'uid': 'user1', 'givenname': 'user', 'mail': 'user1@example.com', 'userpassword': 'password'}))) except ldap.LDAPError as e: log.fatal('test_ticket48228: Failed to add user' + USER1_DN + ': error ' + e.message['desc']) assert False log.info(' Update the password of ' + USER1_DN + ' 6 times') > update_passwd(topology_st, USER1_DN, 'password', 6) /export/tests/tickets/ticket48228_test.py:174: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff999be3a00> user = 'uid=user1,dc=example,dc=com', passwd = 'password', times = 6 def update_passwd(topology_st, user, passwd, times): # Set the default value cpw = passwd for i in range(times): log.info(" Bind as {%s,%s}" % (user, cpw)) topology_st.standalone.simple_bind_s(user, cpw) # Now update the value for this iter. cpw = 'password%d' % i try: topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())]) except ldap.LDAPError as e: log.fatal( > 'test_ticket48228: Failed to update the password ' + cpw + ' of user ' + user + ': error ' + e.message[ 'desc']) E AttributeError: 'INSUFFICIENT_ACCESS' object has no attribute 'message' /export/tests/tickets/ticket48228_test.py:139: AttributeError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
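Analysis: two defects stack here. The operational error is ldap.INSUFFICIENT_ACCESS (the freshly bound user cannot write its own userPassword, which is what the test should be reporting), but the except handler then raises AttributeError because python-ldap exceptions on Python 3 have no .message attribute; the diagnostics live in the dict at e.args[0], as every other traceback in this report shows. A drop-in correction for update_passwd's loop body, using only names already defined in the test module:

try:
    topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', cpw.encode())])
except ldap.LDAPError as e:
    # Python 3 python-ldap: details are in e.args[0] (a dict), not e.message.
    log.fatal('test_ticket48228: Failed to update the password %s of user %s: error %s'
              % (cpw, user, e.args[0].get('desc', e)))
    raise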
Failed | tickets/ticket48252_test.py::test_ticket48252_run_0 | 2.35 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b9880d0> def test_ticket48252_run_0(topology_st): """ Delete an entry cn=test_entry0 Check it is not in the 'cn' index file """ log.info("Case 1 - Check deleted entry is not in the 'cn' index file") uas = UserAccounts(topology_st.standalone, DEFAULT_SUFFIX) del_rdn = "uid=%s0" % TEST_USER del_entry = uas.get('%s0' % TEST_USER) log.info(" Deleting a test entry %s..." % del_entry) del_entry.delete() > assert in_index_file(topology_st, 0, 'cn') is False /export/tests/tickets/ticket48252_test.py:80: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48252_test.py:57: in in_index_file dbscanOut = topology_st.standalone.dbscan(DEFAULT_BENAME, index) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:3015: in dbscan output = subprocess.check_output(cmd) /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ input = None, capture_output = False, timeout = None, check = True popenargs = (['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'],) kwargs = {'stdout': -1} process = <Popen: returncode: -6 args: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slap...> stdout = b'*%20us \n*389 \n*89_ ...9_ds_system \n=demo%20user \n=demo_group ' stderr = None, retcode = -6 def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs): """Run command with arguments and return a CompletedProcess instance. The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. If check is True and the exit code was non-zero, it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute, and output & stderr attributes if those streams were captured. If timeout is given, and the process takes too long, a TimeoutExpired exception will be raised. There is an optional argument "input", allowing you to pass bytes or a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it will be used internally. By default, all communication is in bytes, and therefore any "input" should be bytes, and the stdout and stderr will be bytes. If in text mode, any "input" should be a string, and stdout and stderr will be strings decoded according to locale encoding, or by "encoding" if set. Text mode is triggered by setting any of text, encoding, errors or universal_newlines. The other arguments are the same as for the Popen constructor. """ if input is not None: if kwargs.get('stdin') is not None: raise ValueError('stdin and input arguments may not both be used.') kwargs['stdin'] = PIPE if capture_output: if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None: raise ValueError('stdout and stderr arguments may not be used ' 'with capture_output.') kwargs['stdout'] = PIPE kwargs['stderr'] = PIPE with Popen(*popenargs, **kwargs) as process: try: stdout, stderr = process.communicate(input, timeout=timeout) except TimeoutExpired as exc: process.kill() if _mswindows: # Windows accumulates the output in a single blocking # read() call run on child threads, with the timeout # being done in a join() on those threads. 
communicate() # _after_ kill() is required to collect that and add it # to the exception. exc.stdout, exc.stderr = process.communicate() else: # POSIX _communicate already populated the output so # far into the TimeoutExpired exception. process.wait() raise except: # Including KeyboardInterrupt, communicate handled that. process.kill() # We don't call process.wait() as .__exit__ does that for us. raise retcode = process.poll() if check and retcode: > raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) E subprocess.CalledProcessError: Command '['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db']' died with <Signals.SIGABRT: 6>. /usr/lib64/python3.9/subprocess.py:528: CalledProcessError ------------------------------Captured stderr call------------------------------ free(): double free detected in tcache 2 -------------------------------Captured log call-------------------------------- INFO lib389:__init__.py:3014 Running script: ['/usr/bin/dbscan', '-f', '/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'] | |||
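The crash here is in the dbscan helper itself, not in the assertion: dbscan aborts on a double free while dumping the cn index, so in_index_file never gets to return. A minimal sketch of the same check done directly with subprocess, assuming the /usr/bin/dbscan binary and the index path shown in the captured command above:

import subprocess

def key_in_index(key, index_path='/var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db'):
    # dbscan -f <index> dumps one index key per line ('=key' for equality,
    # '*sub' for substring keys), as in the truncated stdout above.
    result = subprocess.run(['/usr/bin/dbscan', '-f', index_path],
                            capture_output=True, text=True)
    # Distinguish a tool crash (this report: SIGABRT, returncode -6) from a
    # genuinely absent key before asserting anything about the index.
    result.check_returncode()
    return any(key in line for line in result.stdout.splitlines())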
Failed | tickets/ticket48252_test.py::test_ticket48252_run_1 | 0.09 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b9880d0> def test_ticket48252_run_1(topology_st): """ Delete an entry cn=test_entry1 Check it is in the 'objectclass' index file as a tombstone entry """ log.info("Case 2 - Check deleted entry is in the 'objectclass' index file as a tombstone entry") uas = UserAccounts(topology_st.standalone, DEFAULT_SUFFIX) del_rdn = "uid=%s1" % TEST_USER > del_entry = uas.get('%s1' % TEST_USER) /export/tests/tickets/ticket48252_test.py:98: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1146: in get results = self._get_selector(selector) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1177: in _get_selector return self._instance.search_ext_s( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:864: in search_ext_s return self.result(msgid,all=1,timeout=timeout)[1] /usr/local/lib/python3.9/site-packages/lib389/__init__.py:141: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:756: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:760: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99b661790> func = <built-in method result4 of LDAP object at 0x7ff999729720> args = (40, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.SERVER_DOWN: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: SERVER_DOWN | |||
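This second case is a cascade failure: the instance is already unreachable (ldap.SERVER_DOWN) by the time uas.get() runs, so nothing about the objectclass index is actually exercised. A guard along these lines, assuming lib389 DirSrv's status()/restart() methods used throughout this suite, would let dependent cases recover or fail fast:

def ensure_instance_up(inst):
    # Assumes lib389 DirSrv's status()/restart(); restart() re-opens the
    # connection, so subsequent searches stop raising ldap.SERVER_DOWN.
    if not inst.status():
        inst.restart(timeout=30)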
Failed | tickets/ticket48266_test.py::test_ticket48266_count_csn_evaluation | 2.03 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff999b21910> entries = None def test_ticket48266_count_csn_evaluation(topology_m2, entries): ents = topology_m2.ms["supplier1"].agreement.list(suffix=SUFFIX) assert len(ents) == 1 > first_csn = _get_first_not_replicated_csn(topology_m2) /export/tests/tickets/ticket48266_test.py:176: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff999b21910> def _get_first_not_replicated_csn(topology_m2): name = "cn=%s2,%s" % (NEW_ACCOUNT, SUFFIX) # read the first CSN that will not be replicated mod = [(ldap.MOD_REPLACE, 'telephonenumber', ensure_bytes('123456'))] topology_m2.ms["supplier1"].modify_s(name, mod) msgid = topology_m2.ms["supplier1"].search_ext(name, ldap.SCOPE_SUBTREE, 'objectclass=*', ['nscpentrywsi']) rtype, rdata, rmsgid = topology_m2.ms["supplier1"].result2(msgid) attrs = None for dn, raw_attrs in rdata: topology_m2.ms["supplier1"].log.info("dn: %s" % dn) if 'nscpentrywsi' in raw_attrs: attrs = raw_attrs['nscpentrywsi'] assert attrs for attr in attrs: if ensure_str(attr.lower()).startswith('telephonenumber'): break assert attr log.info("############# %s " % name) # now retrieve the CSN of the operation we are looking for csn = None found_ops = topology_m2.ms['supplier1'].ds_access_log.match(".*MOD dn=\"%s\".*" % name) assert(len(found_ops) > 0) found_op = topology_m2.ms['supplier1'].ds_access_log.parse_line(found_ops[-1]) log.info(found_op) # Now look for the related CSN found_csns = topology_m2.ms['supplier1'].ds_access_log.match(".*conn=%s op=%s RESULT.*" % (found_op['conn'], found_op['op'])) assert(len(found_csns) > 0) found_csn = topology_m2.ms['supplier1'].ds_access_log.parse_line(found_csns[-1]) log.info(found_csn) > return found_csn['csn'] E KeyError: 'csn' /export/tests/tickets/ticket48266_test.py:147: KeyError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48266_test.py:125 dn: cn=new_account2,dc=example,dc=com INFO tests.tickets.ticket48266_test:ticket48266_test.py:134 ############# cn=new_account2,dc=example,dc=com INFO tests.tickets.ticket48266_test:ticket48266_test.py:140 {'action': 'MOD', 'timestamp': '[05/Jun/2021:01:31:24.598221097 -0400]', 'conn': '1', 'op': '12', 'rem': 'dn="cn=new_account2,dc=example,dc=com"', 'datetime': datetime.datetime(2021, 6, 5, 1, 0, 0, 598221, tzinfo=tzoffset(None, -14400))} INFO tests.tickets.ticket48266_test:ticket48266_test.py:146 {'action': 'RESULT', 'timestamp': '[05/Jun/2021:01:31:24.649935323 -0400]', 'conn': '1', 'op': '12', 'rem': 'err=0 tag=103 nentries=0 wtime=0.000178478 optime=0.051780043 etime=0.051953895 csn=60bb0c2c000000010000', 'datetime': datetime.datetime(2021, 6, 5, 1, 0, 0, 649935, tzinfo=tzoffset(None, -14400))} | |||
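The KeyError is a log-parsing gap rather than a replication problem: the captured RESULT line does carry the CSN, but parse_line() left it inside the 'rem' field ('... etime=0.051953895 csn=60bb0c2c000000010000') instead of a dedicated 'csn' key. A hedged fallback sketch that recovers it with a regex:

import re

def csn_from_result(parsed_line):
    # parsed_line is the dict returned by ds_access_log.parse_line() above.
    if 'csn' in parsed_line:
        return parsed_line['csn']
    # Fall back to the raw remainder field, where the captured log shows
    # the csn actually landed.
    match = re.search(r'csn=(\w+)', parsed_line.get('rem', ''))
    assert match, 'RESULT line carried no csn'
    return match.group(1)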
Failed | tickets/ticket48325_test.py::test_ticket48325 | 42.86 | |
topology_m1h1c1 = <lib389.topologies.TopologyMain object at 0x7ff999474c10> def test_ticket48325(topology_m1h1c1): """ Test that the RUV element order is correctly maintained when promoting a hub or consumer. """ # # Promote consumer to supplier # C1 = topology_m1h1c1.cs["consumer1"] M1 = topology_m1h1c1.ms["supplier1"] H1 = topology_m1h1c1.hs["hub1"] repl = ReplicationManager(DEFAULT_SUFFIX) > repl._ensure_changelog(C1) /export/tests/tickets/ticket48325_test.py:53: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/replica.py:1928: in _ensure_changelog cl.create(properties={ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:972: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:947: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999b01f40> func = <built-in method result4 of LDAP object at 0x7ff99944c630> args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 15, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for hub1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:524 Creating replication topology.
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 13479622-9c40-412c-82c9-6a4ac40e7e65 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is working
INFO lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created
INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect d9c239c8-5db8-4297-b977-64c0ed67601e / got description=13479622-9c40-412c-82c9-6a4ac40e7e65)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working | |||
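The refusal text is the key detail: on this 2.0 server the replication changelog is integrated into the backend configuration, so creating the legacy standalone changelog entry is rejected. A sketch of how a test could tolerate both layouts (an assumption about the desired behavior, not the upstream fix):

import ldap

def ensure_changelog_compat(repl, instance):
    # repl is a lib389 ReplicationManager, as in the test above.
    try:
        repl._ensure_changelog(instance)
    except ldap.UNWILLING_TO_PERFORM as e:
        info = e.args[0].get('info', '')
        if 'backend configuration' not in info:
            raise  # some other refusal: surface it
        # Integrated changelog: nothing to create, the configuration
        # already lives under the backend entry on this server version.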
Failed | tickets/ticket48342_test.py::test_ticket4026 | 139.72 | |
topology_m3 = <lib389.topologies.TopologyMain object at 0x7ff999453100> def test_ticket4026(topology_m3): """Write your replication testcase here. To access each DirSrv instance use: topology_m3.ms["supplier1"], topology_m3.ms["supplier2"], ..., topology_m3.hub1, ..., topology_m3.consumer1, ... Also, if you need any testcase initialization, please, write additional fixture for that(include finalizer). """ try: topology_m3.ms["supplier1"].add_s(Entry((PEOPLE_DN, { 'objectclass': "top extensibleObject".split(), 'ou': 'people'}))) except ldap.ALREADY_EXISTS: pass topology_m3.ms["supplier1"].add_s(Entry(('ou=ranges,' + SUFFIX, { 'objectclass': 'top organizationalunit'.split(), 'ou': 'ranges' }))) for cpt in range(MAX_ACCOUNTS): name = "user%d" % (cpt) topology_m3.ms["supplier1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), { 'objectclass': 'top posixAccount extensibleObject'.split(), 'uid': name, 'cn': name, 'uidNumber': '1', 'gidNumber': '1', 'homeDirectory': '/home/%s' % name }))) # make supplier3 having more free slots that supplier2 # so supplier1 will contact supplier3 _dna_config(topology_m3.ms["supplier1"], nextValue=100, maxValue=10) _dna_config(topology_m3.ms["supplier2"], nextValue=200, maxValue=10) _dna_config(topology_m3.ms["supplier3"], nextValue=300, maxValue=3000) # Turn on lots of error logging now. mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', b'16384')] # mod = [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', '1')] topology_m3.ms["supplier1"].modify_s('cn=config', mod) topology_m3.ms["supplier2"].modify_s('cn=config', mod) topology_m3.ms["supplier3"].modify_s('cn=config', mod) # We need to wait for the event in dna.c to fire to start the servers # see dna.c line 899 time.sleep(60) # add on supplier1 users with description DNA for cpt in range(10): name = "user_with_desc1_%d" % (cpt) topology_m3.ms["supplier1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), { 'objectclass': 'top posixAccount extensibleObject'.split(), 'uid': name, 'cn': name, 'description': '-1', 'uidNumber': '1', 'gidNumber': '1', 'homeDirectory': '/home/%s' % name }))) # give time to negociate supplier1 <--> supplier3 time.sleep(10) # add on supplier1 users with description DNA for cpt in range(11, 20): name = "user_with_desc1_%d" % (cpt) > topology_m3.ms["supplier1"].add_s(Entry(("uid=%s,%s" % (name, PEOPLE_DN), { 'objectclass': 'top posixAccount extensibleObject'.split(), 'uid': name, 'cn': name, 'description': '-1', 'uidNumber': '1', 'gidNumber': '1', 'homeDirectory': '/home/%s' % name }))) /export/tests/tickets/ticket48342_test.py:118: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:439: in add_s return self.add_ext_s(dn,modlist,None,None) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:171: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: 
in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999a542e0> func = <built-in method result4 of LDAP object at 0x7ff99f3d03c0> args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.OPERATIONS_ERROR: {'msgtype': 105, 'msgid': 15, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Allocation of a new value for range cn=dna config,cn=distributed numeric assignment plugin,cn=plugins,cn=config failed! Unable to proceed.'} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 72fc1390-6638-4c36-befb-cc9cdab873c3 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ec55684b-430c-4ed7-84df-aa7811b8c776 / got description=72fc1390-6638-4c36-befb-cc9cdab873c3)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002
INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect bfc46d1b-f814-49b7-8167-0ec9dd955af8 / got description=ec55684b-430c-4ed7-84df-aa7811b8c776)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 09694458-bf95-469d-9199-6f29d067adf8 / got description=bfc46d1b-f814-49b7-8167-0ec9dd955af8)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created
-------------------------------Captured log call--------------------------------
INFO tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001
INFO tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin...
INFO tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server...
INFO tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002
INFO tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin...
INFO tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server...
INFO tests.tickets.ticket48342_test:ticket48342_test.py:19 Add dna plugin config entry...ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003
INFO tests.tickets.ticket48342_test:ticket48342_test.py:37 Enable the DNA plugin...
INFO tests.tickets.ticket48342_test:ticket48342_test.py:44 Restarting the server... | |||
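supplier1 is deliberately given a range too small for the second batch of adds, so it must request a new range from supplier3 through DNA's shared configuration; the OPERATIONS_ERROR above means that range transfer never completed. For reference, a sketch of the kind of DNA config entry this test drives, using the config DN from the error message (the attribute set is an assumption based on the plugin's documented dnaType/dnaNextValue/dnaMaxValue attributes; the objectclass mirrors what these ticket tests use elsewhere):

import ldap
import ldap.modlist

DNA_CONFIG_DN = ('cn=dna config,cn=distributed numeric assignment plugin,'
                 'cn=plugins,cn=config')

def add_dna_config(conn, next_value, max_value):
    # Values are illustrative; the test gives each supplier a different
    # dnaNextValue so range exhaustion forces a transfer between peers.
    entry = {
        'objectclass': [b'top', b'extensibleObject'],
        'cn': [b'dna config'],
        'dnatype': [b'description'],
        'dnafilter': [b'(objectclass=posixAccount)'],
        'dnascope': [b'ou=people,dc=example,dc=com'],
        'dnanextvalue': [str(next_value).encode()],
        'dnamaxvalue': [str(max_value).encode()],
    }
    conn.add_s(DNA_CONFIG_DN, ldap.modlist.addModlist(entry))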
Failed | tickets/ticket48637_test.py::test_ticket48637 | 15.10 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999441490> def test_ticket48637(topology_st): """Test for entry cache corruption This requires automember and managed entry plugins to be configured. Then remove the group that automember would use to trigger a failure when adding a new entry. Automember fails, and then managed entry also fails. Make sure a base search on the entry returns error 32 """ if DEBUGGING: # Add debugging steps(if any)... pass # # Add our setup entries # try: topology_st.standalone.add_s(Entry((PEOPLE_OU, { 'objectclass': 'top organizationalunit'.split(), 'ou': 'people'}))) except ldap.ALREADY_EXISTS: pass except ldap.LDAPError as e: log.fatal('Failed to add people ou: ' + str(e)) assert False try: topology_st.standalone.add_s(Entry((GROUP_OU, { 'objectclass': 'top organizationalunit'.split(), 'ou': 'groups'}))) except ldap.ALREADY_EXISTS: pass except ldap.LDAPError as e: log.fatal('Failed to add groups ou: ' + str(e)) assert False try: topology_st.standalone.add_s(Entry((MEP_OU, { 'objectclass': 'top extensibleObject'.split(), 'ou': 'mep'}))) except ldap.LDAPError as e: log.fatal('Failed to add MEP ou: ' + str(e)) assert False try: topology_st.standalone.add_s(Entry((MEP_TEMPLATE, { 'objectclass': 'top mepTemplateEntry'.split(), 'cn': 'mep template', 'mepRDNAttr': 'cn', 'mepStaticAttr': 'objectclass: groupofuniquenames', 'mepMappedAttr': 'cn: $uid'}))) except ldap.LDAPError as e: log.fatal('Failed to add MEP ou: ' + str(e)) assert False # # Configure automember # try: topology_st.standalone.add_s(Entry((AUTO_DN, { 'cn': 'All Users', 'objectclass': ['top', 'autoMemberDefinition'], 'autoMemberScope': 'dc=example,dc=com', 'autoMemberFilter': 'objectclass=person', 'autoMemberDefaultGroup': GROUP_DN, 'autoMemberGroupingAttr': 'uniquemember:dn'}))) except ldap.LDAPError as e: log.fatal('Failed to configure automember plugin : ' + str(e)) assert False # # Configure managed entry plugin # try: topology_st.standalone.add_s(Entry((MEP_DN, { 'cn': 'MEP Definition', 'objectclass': ['top', 'extensibleObject'], 'originScope': 'ou=people,dc=example,dc=com', 'originFilter': 'objectclass=person', 'managedBase': 'ou=groups,dc=example,dc=com', 'managedTemplate': MEP_TEMPLATE}))) except ldap.LDAPError as e: log.fatal('Failed to configure managed entry plugin : ' + str(e)) assert False # # Restart DS # topology_st.standalone.restart(timeout=30) # # Add entry that should fail since the automember group does not exist # try: topology_st.standalone.add_s(Entry((USER_DN, { 'uid': 'test', 'objectclass': ['top', 'person', 'extensibleObject'], 'sn': 'test', 'cn': 'test'}))) except ldap.LDAPError as e: pass # # Search for the entry - it should not be returned # try: entry = topology_st.standalone.search_s(USER_DN, ldap.SCOPE_SUBTREE, 'objectclass=*') if entry: log.fatal('Entry was incorrectly returned') > assert False E assert False /export/tests/tickets/ticket48637_test.py:139: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket48637_test:ticket48637_test.py:138 Entry was incorrectly returned | |||
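The test's final check is the part that failed: after the ADD is rejected because the automember default group is missing, a base-scoped search on the entry should come back with err=32, and getting the entry back instead is the cache-corruption symptom. A minimal sketch of that probe, assuming python-ldap:

import ldap

def entry_leaked(conn, dn):
    # After a rejected ADD, the entry must not be readable; NO_SUCH_OBJECT
    # (err=32) is the healthy outcome this test expects.
    try:
        conn.search_s(dn, ldap.SCOPE_BASE, '(objectclass=*)')
        return True            # entry returned: the add leaked into the cache
    except ldap.NO_SUCH_OBJECT:
        return False           # expected: the add never committed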
Failed | tickets/ticket48759_test.py::test_ticket48759 | 10.46 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999d094c0> def test_ticket48759(topology_st): """ The fix for ticket 48759 has to prevent plugin calls for tombstone purging The test uses the memberof and retrocl plugins to verify this. In tombstone purging without the fix the mmeberof plugin is called, if the tombstone entry is a group, it modifies the user entries for the group and if retrocl is enabled this mod is written to the retrocl The test sequence is: - enable replication - enable memberof and retro cl plugin - add user entries - add a group and add the users as members - verify memberof is set to users - delete the group - verify memberof is removed from users - add group again - verify memberof is set to users - get number of changes in retro cl for one user - configure tombstone purging - wait for purge interval to pass - add a dummy entry to increase maxcsn - wait for purge interval to pass two times - get number of changes in retro cl for user again - assert there was no additional change """ log.info('Testing Ticket 48759 - no plugin calls for tombstone purging') # # Setup Replication # log.info('Setting up replication...') repl = ReplicationManager(DEFAULT_SUFFIX) repl.create_first_supplier(topology_st.standalone) # # enable dynamic plugins, memberof and retro cl plugin # log.info('Enable plugins...') try: topology_st.standalone.config.set('nsslapd-dynamic-plugins', 'on') except ldap.LDAPError as e: ldap.error('Failed to enable dynamic plugins! ' + e.args[0]['desc']) assert False topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) # Configure memberOf group attribute try: > topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofgroupattr', b'member')]) /export/tests/tickets/ticket48759_test.py:128: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn=MemberOf Plugin,cn=plugins,cn=config', [(2, 'memberofgroupattr', b'member')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x5628ddd00af0, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 173,...93, function='_hookexec', code_context=[' return self._inner_hookexec(hook, methods, kwargs)\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x5628ddbaf850, file '/export/tests/tickets/ticket48759_test.py', line 134, code test_ticket..., function='test_ticket48759', code_context=[' topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN,\n'], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d09490> dn = 'cn=MemberOf Plugin,cn=plugins,cn=config' modlist = [(2, 'memberofgroupattr', b'member')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn=MemberOf Plugin,cn=plugins,cn=config', [(2, 'memberofgroupattr', b'member')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d09490> dn = 'cn=MemberOf Plugin,cn=plugins,cn=config' modlist = [(2, 'memberofgroupattr', b'member')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (20,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d09490>, msgid = 20, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (20, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d09490>, msgid = 20, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff99f49b5d0>, 20, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d09490> func = <built-in method result4 of LDAP object at 0x7ff99f49b5d0> args = (20, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.UNWILLING_TO_PERFORM'> exc_value = UNWILLING_TO_PERFORM({'msgtype': 103, 'msgid': 20, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}) exc_traceback = <traceback object at 0x7ff999d17380> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999d09490> func = <built-in method result4 of LDAP object at 0x7ff99f49b5d0> args = (20, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 103, 'msgid': 20, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7ff999d094c0> def test_ticket48759(topology_st): """ The fix for ticket 48759 has to prevent plugin calls for tombstone purging The test uses the memberof and retrocl plugins to verify this. In tombstone purging without the fix the mmeberof plugin is called, if the tombstone entry is a group, it modifies the user entries for the group and if retrocl is enabled this mod is written to the retrocl The test sequence is: - enable replication - enable memberof and retro cl plugin - add user entries - add a group and add the users as members - verify memberof is set to users - delete the group - verify memberof is removed from users - add group again - verify memberof is set to users - get number of changes in retro cl for one user - configure tombstone purging - wait for purge interval to pass - add a dummy entry to increase maxcsn - wait for purge interval to pass two times - get number of changes in retro cl for user again - assert there was no additional change """ log.info('Testing Ticket 48759 - no plugin calls for tombstone purging') # # Setup Replication # log.info('Setting up replication...') repl = ReplicationManager(DEFAULT_SUFFIX) repl.create_first_supplier(topology_st.standalone) # # enable dynamic plugins, memberof and retro cl plugin # log.info('Enable plugins...') try: topology_st.standalone.config.set('nsslapd-dynamic-plugins', 'on') except ldap.LDAPError as e: ldap.error('Failed to enable dynamic plugins! ' + e.args[0]['desc']) assert False topology_st.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology_st.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) # Configure memberOf group attribute try: topology_st.standalone.modify_s(MEMBEROF_PLUGIN_DN, [(ldap.MOD_REPLACE, 'memberofgroupattr', b'member')]) except ldap.LDAPError as e: log.fatal('Failed to configure memberOf plugin: error ' + e.args[0]['desc']) > assert False E assert False /export/tests/tickets/ticket48759_test.py:134: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.tickets.ticket48759_test:ticket48759_test.py:133 Failed to configure memberOf plugin: error Server is unwilling to perform | |||
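The failing call is the raw modify_s against the memberOf plugin entry, and the traceback's own DeprecationWarning already points at the alternative. A sketch of the same change through lib389's plugin objects (assuming MemberOfPlugin and the generic replace() accessor); whether this clears the unwilling-to-perform refusal depends on why the server refused, which the result above does not say:

from lib389.plugins import MemberOfPlugin

def configure_memberof(inst):
    # replace() issues the same MOD_REPLACE internally, without tripping
    # the raw-ldap-function deprecation path shown in the traceback.
    memberof = MemberOfPlugin(inst)
    memberof.enable()
    memberof.replace('memberofgroupattr', 'member')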
Failed | tickets/ticket48784_test.py::test_ticket48784 | 57.89 | |
Fixture "add_entry" called directly. Fixtures are not meant to be called directly, but are created automatically when test functions request them as parameters. See https://docs.pytest.org/en/latest/fixture.html for more information about fixtures, and https://docs.pytest.org/en/latest/deprecations.html#calling-fixtures-directly about how to update your code.
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:142 Creating replication topology.
INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 652b6a73-76a9-45f1-926a-e24a086b4758 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d3d6be81-3963-463a-8f8d-0a050e9740d3 / got description=652b6a73-76a9-45f1-926a-e24a086b4758)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO tests.tickets.ticket48784_test:ticket48784_test.py:90 Ticket 48784 - Allow usage of OpenLDAP libraries that don't use NSS for crypto
INFO tests.tickets.ticket48784_test:ticket48784_test.py:50 ######################### Configure SSL/TLS agreements ######################
INFO tests.tickets.ticket48784_test:ticket48784_test.py:51 ######################## supplier1 <-- startTLS -> supplier2 #####################
INFO tests.tickets.ticket48784_test:ticket48784_test.py:53 ##### Update the agreement of supplier1
INFO tests.tickets.ticket48784_test:ticket48784_test.py:58 ##### Update the agreement of supplier2
INFO tests.tickets.ticket48784_test:ticket48784_test.py:68 ######################### Configure SSL/TLS agreements Done ###################### | |||
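This failure is pure pytest plumbing: somewhere in the test, add_entry is invoked as a function instead of being requested as a parameter. A sketch of the pattern the error message asks for (names mirror the fixture and topology named above; the bodies are illustrative):

import pytest

@pytest.fixture
def add_entry(topology_m2):
    # ... create the test entry on supplier1 (body illustrative) ...
    return 'uid=test_user,dc=example,dc=com'

def test_ticket48784(topology_m2, add_entry):
    # The fixture value is injected by pytest; never call add_entry() directly.
    assert add_entry.startswith('uid=')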
Failed | tickets/ticket48798_test.py::test_ticket48798 | 18.39 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999d0de20> def test_ticket48798(topology_st): """ Test DH param sizes offered by DS. """ topology_st.standalone.enable_tls() # Confirm that we have a connection, and that it has DH # Open a socket to the port. # Check the security settings. > size = check_socket_dh_param_size(topology_st.standalone.host, topology_st.standalone.sslport) /export/tests/tickets/ticket48798_test.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48798_test.py:23: in check_socket_dh_param_size output = check_output(cmd, shell=True) /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ input = None, capture_output = False, timeout = None, check = True popenargs = ('echo quit | openssl s_client -connect ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63601 -msg -cipher DH | grep -A 1 ServerKeyExchange',) kwargs = {'shell': True, 'stdout': -1} process = <Popen: returncode: 1 args: ['e', 'c', 'h', 'o', ' ', 'q', 'u', 'i', 't', ' ...> stdout = b'', stderr = None, retcode = 1 def run(*popenargs, input=None, capture_output=False, timeout=None, check=False, **kwargs): """Run command with arguments and return a CompletedProcess instance. The returned instance will have attributes args, returncode, stdout and stderr. By default, stdout and stderr are not captured, and those attributes will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them. If check is True and the exit code was non-zero, it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute, and output & stderr attributes if those streams were captured. If timeout is given, and the process takes too long, a TimeoutExpired exception will be raised. There is an optional argument "input", allowing you to pass bytes or a string to the subprocess's stdin. If you use this argument you may not also use the Popen constructor's "stdin" argument, as it will be used internally. By default, all communication is in bytes, and therefore any "input" should be bytes, and the stdout and stderr will be bytes. If in text mode, any "input" should be a string, and stdout and stderr will be strings decoded according to locale encoding, or by "encoding" if set. Text mode is triggered by setting any of text, encoding, errors or universal_newlines. The other arguments are the same as for the Popen constructor. """ if input is not None: if kwargs.get('stdin') is not None: raise ValueError('stdin and input arguments may not both be used.') kwargs['stdin'] = PIPE if capture_output: if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None: raise ValueError('stdout and stderr arguments may not be used ' 'with capture_output.') kwargs['stdout'] = PIPE kwargs['stderr'] = PIPE with Popen(*popenargs, **kwargs) as process: try: stdout, stderr = process.communicate(input, timeout=timeout) except TimeoutExpired as exc: process.kill() if _mswindows: # Windows accumulates the output in a single blocking # read() call run on child threads, with the timeout # being done in a join() on those threads. communicate() # _after_ kill() is required to collect that and add it # to the exception. exc.stdout, exc.stderr = process.communicate() else: # POSIX _communicate already populated the output so # far into the TimeoutExpired exception. 
process.wait() raise except: # Including KeyboardInterrupt, communicate handled that. process.kill() # We don't call process.wait() as .__exit__ does that for us. raise retcode = process.poll() if check and retcode: > raise CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) E subprocess.CalledProcessError: Command 'echo quit | openssl s_client -connect ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63601 -msg -cipher DH | grep -A 1 ServerKeyExchange' returned non-zero exit status 1. /usr/lib64/python3.9/subprocess.py:528: CalledProcessError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ depth=1 C = AU, ST = Queensland, L = 389ds, O = testing, CN = ssca.389ds.example.com verify return:1 depth=0 C = AU, ST = Queensland, L = 389ds, O = testing, GN = 5350df56-4809-4191-825e-246641dec5ab, CN = ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com verify return:1 DONE | |||
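Editor's annotation on the failure above: the CalledProcessError comes from check_output(..., check=True); the openssl-to-grep pipeline exits non-zero whenever no ServerKeyExchange message is captured, for example when the server refuses the DH-only cipher list or the handshake settles on TLS 1.3, which carries no ServerKeyExchange at all. A minimal sketch of a gentler probe, assuming the same shell pipeline; try_dh_param_size is a hypothetical name, not the test's helper:

    import subprocess

    def try_dh_param_size(host, port):
        # Returns the captured ServerKeyExchange snippet, or None when the
        # pipeline finds nothing, instead of raising CalledProcessError.
        cmd = ('echo quit | openssl s_client -connect {}:{} -msg -cipher DH '
               '| grep -A 1 ServerKeyExchange'.format(host, port))
        proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
        if proc.returncode != 0 or not proc.stdout:
            return None  # no DH key exchange observed; let the caller decide
        return proc.stdout  # caller still parses the DH param size from this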
Failed | tickets/ticket48808_test.py::test_ticket48808 | 15.59 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b6cf490> create_user = None def test_ticket48808(topology_st, create_user): log.info('Run multiple paging controls on a single connection') users_num = 100 page_size = 30 users_list = add_users(topology_st, users_num) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] log.info('Set user bind') topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD) log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] for ii in range(3): log.info('Iteration %d' % ii) msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, search_flt, searchreq_attrlist, serverctrls=controls) rtype, rdata, rmsgid, rctrls = topology_st.standalone.result3(msgid) pctrls = [ c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType ] req_ctrl.cookie = pctrls[0].cookie msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, search_flt, searchreq_attrlist, serverctrls=controls) log.info('Set Directory Manager bind back') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) del_users(topology_st, users_list) log.info('Abandon the search') users_num = 10 page_size = 0 users_list = add_users(topology_st, users_num) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] log.info('Set user bind') topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD) log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] msgid = topology_st.standalone.search_ext(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, search_flt, searchreq_attrlist, serverctrls=controls) rtype, rdata, rmsgid, rctrls = topology_st.standalone.result3(msgid) pctrls = [ c for c in rctrls if c.controlType == SimplePagedResultsControl.controlType ] assert not pctrls[0].cookie log.info('Set Directory Manager bind back') topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) del_users(topology_st, users_list) log.info("Search should fail with 'nsPagedSizeLimit = 5'" "and 'nsslapd-pagedsizelimit = 15' with 10 users") conf_attr = b'15' user_attr = b'5' expected_rs = ldap.SIZELIMIT_EXCEEDED users_num = 10 page_size = 10 users_list = add_users(topology_st, users_num) search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] conf_attr_bck = change_conf_attr(topology_st, DN_CONFIG, 'nsslapd-pagedsizelimit', conf_attr) user_attr_bck = change_conf_attr(topology_st, TEST_USER_DN, 'nsPagedSizeLimit', user_attr) log.info('Set user bind') topology_st.standalone.simple_bind_s(TEST_USER_DN, TEST_USER_PWD) log.info('Create simple paged results control instance') req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') controls = [req_ctrl] log.info('Expect to fail with SIZELIMIT_EXCEEDED') with pytest.raises(expected_rs): > all_results = paged_search(topology_st, controls, search_flt, searchreq_attrlist) E Failed: DID NOT RAISE <class 'ldap.SIZELIMIT_EXCEEDED'> /export/tests/tickets/ticket48808_test.py:252: Failed -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. 
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48808_test:ticket48808_test.py:159 Run multiple paging controls on a single connection INFO tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 100 users INFO tests.tickets.ticket48808_test:ticket48808_test.py:166 Set user bind INFO tests.tickets.ticket48808_test:ticket48808_test.py:169 Create simple paged results control instance INFO tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 0 INFO tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 1 INFO tests.tickets.ticket48808_test:ticket48808_test.py:174 Iteration 2 INFO tests.tickets.ticket48808_test:ticket48808_test.py:193 Set Directory Manager bind back INFO tests.tickets.ticket48808_test:ticket48808_test.py:75 Deleting 100 users INFO tests.tickets.ticket48808_test:ticket48808_test.py:197 Abandon the search INFO tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 10 users INFO tests.tickets.ticket48808_test:ticket48808_test.py:204 Set user bind INFO tests.tickets.ticket48808_test:ticket48808_test.py:207 Create simple paged results control instance INFO tests.tickets.ticket48808_test:ticket48808_test.py:224 Set Directory Manager bind back INFO tests.tickets.ticket48808_test:ticket48808_test.py:75 Deleting 10 users INFO tests.tickets.ticket48808_test:ticket48808_test.py:228 Search should fail with 'nsPagedSizeLimit = 5'and 'nsslapd-pagedsizelimit = 15' with 10 users INFO tests.tickets.ticket48808_test:ticket48808_test.py:48 Adding 10 users INFO tests.tickets.ticket48808_test:ticket48808_test.py:95 Set nsslapd-pagedsizelimit to b'15'. Previous value - [b'0']. Modified suffix - cn=config. INFO tests.tickets.ticket48808_test:ticket48808_test.py:95 Set nsPagedSizeLimit to b'5'. Previous value - None. Modified suffix - uid=simplepaged_test,dc=example,dc=com. INFO tests.tickets.ticket48808_test:ticket48808_test.py:243 Set user bind INFO tests.tickets.ticket48808_test:ticket48808_test.py:246 Create simple paged results control instance INFO tests.tickets.ticket48808_test:ticket48808_test.py:250 Expect to fail with SIZELIMIT_EXCEEDED INFO tests.tickets.ticket48808_test:ticket48808_test.py:130 Getting page 0 | |||
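Editor's annotation: the interesting part of this failure is the last step. With nsslapd-pagedsizelimit = 15 in cn=config and nsPagedSizeLimit = 5 on the bind entry, a paged search over 10 users was expected to raise ldap.SIZELIMIT_EXCEEDED, yet paged_search returned normally, so the per-user limit was apparently not enforced. For reference, a minimal paged-search loop in the style the test uses; when the server does enforce the limit, the exception surfaces from result3() on the page that crosses it, not from search_ext():

    import ldap
    from ldap.controls import SimplePagedResultsControl

    def paged_search_all(conn, base, filt, attrs, page_size):
        req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
        results = []
        while True:
            msgid = conn.search_ext(base, ldap.SCOPE_SUBTREE, filt, attrs,
                                    serverctrls=[req_ctrl])
            # result3 is where SIZELIMIT_EXCEEDED would be raised
            rtype, rdata, rmsgid, rctrls = conn.result3(msgid)
            results.extend(rdata)
            pctrls = [c for c in rctrls
                      if c.controlType == SimplePagedResultsControl.controlType]
            if not pctrls or not pctrls[0].cookie:
                return results
            req_ctrl.cookie = pctrls[0].cookie  # continue with the next page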
Failed | tickets/ticket48896_test.py::test_ticket48896 | 10.28 | |
server = <lib389.DirSrv object at 0x7ff9997b0370>, curpw = 'password' newpw = 'Abcd012+', expstr = 'be ok', rc = 0 def replace_pw(server, curpw, newpw, expstr, rc): log.info('Binding as {%s, %s}' % (TESTDN, curpw)) server.simple_bind_s(TESTDN, curpw) hit = 0 log.info('Replacing password: %s -> %s, which should %s' % (curpw, newpw, expstr)) try: > server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))]) /export/tests/tickets/ticket48896_test.py:53: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=buser123,dc=example,dc=com', [(2, 'userPassword', b'Abcd012+')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7ff999d84440, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 173,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x5628ddc791e0, file '/export/tests/tickets/ticket48896_test.py', line 57, code replace_pw>,...code_context=[" server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997b0370> dn = 'uid=buser123,dc=example,dc=com' modlist = [(2, 'userPassword', b'Abcd012+')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=buser123,dc=example,dc=com', [(2, 'userPassword', b'Abcd012+')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not 
('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997b0370> dn = 'uid=buser123,dc=example,dc=com' modlist = [(2, 'userPassword', b'Abcd012+')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (8,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997b0370>, msgid = 8, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (8, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997b0370>, msgid = 8, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff99b9ccba0>, 8, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997b0370> func = <built-in method result4 of LDAP object at 0x7ff99b9ccba0> args = (8, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 8, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=buser123,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7ff99baed0c0> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997b0370> func = <built-in method result4 of LDAP object at 0x7ff99b9ccba0> args = (8, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 8, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=buser123,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7ff99b9cc550> def test_ticket48896(topology_st): """ """ log.info('Testing Ticket 48896 - Default Setting for passwordMinTokenLength does not work') log.info("Setting global password policy with password syntax.") topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) topology_st.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'passwordCheckSyntax', b'on'), (ldap.MOD_REPLACE, 'nsslapd-pwpolicy-local', b'on')]) config = topology_st.standalone.search_s(CONFIG_DN, ldap.SCOPE_BASE, 'cn=*') mintokenlen = config[0].getValue('passwordMinTokenLength') history = config[0].getValue('passwordInHistory') log.info('Default passwordMinTokenLength == %s' % mintokenlen) log.info('Default passwordInHistory == %s' % history) log.info('Adding a user.') curpw = 'password' topology_st.standalone.add_s(Entry((TESTDN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(), 'cn': 'test user', 'sn': 'user', 'userPassword': curpw}))) newpw = 'Abcd012+' exp = 'be ok' rc = 0 > replace_pw(topology_st.standalone, curpw, newpw, exp, rc) /export/tests/tickets/ticket48896_test.py:94: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ server = <lib389.DirSrv object at 0x7ff9997b0370>, curpw = 'password' newpw = 'Abcd012+', expstr = 'be ok', rc = 0 def replace_pw(server, curpw, newpw, expstr, rc): log.info('Binding as {%s, %s}' % (TESTDN, curpw)) server.simple_bind_s(TESTDN, curpw) hit = 0 log.info('Replacing password: %s -> %s, which should %s' % (curpw, newpw, expstr)) try: server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword', ensure_bytes(newpw))]) except Exception as e: log.info("Exception (expected): %s" % type(e).__name__) hit = 1 > assert isinstance(e, rc) E TypeError: isinstance() arg 2 must be a type or tuple of types /export/tests/tickets/ticket48896_test.py:57: TypeError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48896_test:ticket48896_test.py:69 Testing Ticket 48896 - Default Setting for passwordMinTokenLength does not work INFO tests.tickets.ticket48896_test:ticket48896_test.py:71 Setting global password policy with password syntax. INFO tests.tickets.ticket48896_test:ticket48896_test.py:80 Default passwordMinTokenLength == b'3' INFO tests.tickets.ticket48896_test:ticket48896_test.py:81 Default passwordInHistory == b'6' INFO tests.tickets.ticket48896_test:ticket48896_test.py:83 Adding a user. INFO tests.tickets.ticket48896_test:ticket48896_test.py:47 Binding as {uid=buser123,dc=example,dc=com, password} INFO tests.tickets.ticket48896_test:ticket48896_test.py:51 Replacing password: password -> Abcd012+, which should be ok INFO tests.tickets.ticket48896_test:ticket48896_test.py:55 Exception (expected): INSUFFICIENT_ACCESS | |||
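Editor's annotation: two distinct problems are visible in this traceback. First, the server rejected the self-service password change with INSUFFICIENT_ACCESS, meaning the bound user lacks 'write' privilege to userPassword; that points at the test setup (a missing ACI or password-change policy setting) rather than at passwordMinTokenLength itself. Second, the except branch then crashed because rc is the integer 0 and isinstance() requires a type as its second argument. A hedged rewrite of the helper that makes the expectation explicit; this is an illustration reusing the test's names (TESTDN, ensure_bytes), not the upstream fix:

    def replace_pw(server, curpw, newpw, expected_exc=None):
        # expected_exc is None when the change must succeed, or an ldap
        # exception class when it must fail with exactly that error.
        server.simple_bind_s(TESTDN, curpw)
        try:
            server.modify_s(TESTDN, [(ldap.MOD_REPLACE, 'userPassword',
                                      ensure_bytes(newpw))])
        except ldap.LDAPError as e:
            assert expected_exc is not None and isinstance(e, expected_exc), e
        else:
            assert expected_exc is None, 'password change unexpectedly succeeded'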
Failed | tickets/ticket48906_test.py::test_ticket48906_dblock_edit_update | 2.48 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999447fa0> def test_ticket48906_dblock_edit_update(topology_st): topology_st.standalone.log.info('###################################') topology_st.standalone.log.info('###') topology_st.standalone.log.info('### Check that after stop') topology_st.standalone.log.info('### - dse.ldif contains DBLOCK_LDAP_UPDATE') topology_st.standalone.log.info('### - guardian contains DBLOCK_LDAP_UPDATE') topology_st.standalone.log.info('### Check that edit dse+restart') topology_st.standalone.log.info('### - monitor contains DBLOCK_EDIT_UPDATE') topology_st.standalone.log.info('### - configured contains DBLOCK_EDIT_UPDATE') topology_st.standalone.log.info('### Check that after stop') topology_st.standalone.log.info('### - dse.ldif contains DBLOCK_EDIT_UPDATE') topology_st.standalone.log.info('### - guardian contains DBLOCK_EDIT_UPDATE') topology_st.standalone.log.info('###') topology_st.standalone.log.info('###################################') topology_st.standalone.stop(timeout=10) _check_dse_ldif_value(topology_st, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE) _check_guardian_value(topology_st, attr=DBLOCK_ATTR_GUARDIAN, expected_value=DBLOCK_LDAP_UPDATE) dse_ref_ldif = topology_st.standalone.confdir + '/dse.ldif' dse_new_ldif = topology_st.standalone.confdir + '/dse.ldif.new' dse_ref = open(dse_ref_ldif, "r") dse_new = open(dse_new_ldif, "w") # Change the DBLOCK in dse.ldif value = None while True: line = dse_ref.readline() if (line == ''): break elif DBLOCK_ATTR_CONFIG in line.lower(): value = line.split()[1] > assert (value == DBLOCK_LDAP_UPDATE) E AssertionError: assert 'on' == '20000' E - 20000 E + on /export/tests/tickets/ticket48906_test.py:222: AssertionError ------------------------------Captured stdout call------------------------------ line locks:20000 expected_value 20000 value 20000 -------------------------------Captured log call-------------------------------- INFO lib389:ticket48906_test.py:191 ################################### INFO lib389:ticket48906_test.py:192 ### INFO lib389:ticket48906_test.py:193 ### Check that after stop INFO lib389:ticket48906_test.py:194 ### - dse.ldif contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:195 ### - guardian contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:196 ### Check that edit dse+restart INFO lib389:ticket48906_test.py:197 ### - monitor contains DBLOCK_EDIT_UPDATE INFO lib389:ticket48906_test.py:198 ### - configured contains DBLOCK_EDIT_UPDATE INFO lib389:ticket48906_test.py:199 ### Check that after stop INFO lib389:ticket48906_test.py:200 ### - dse.ldif contains DBLOCK_EDIT_UPDATE INFO lib389:ticket48906_test.py:201 ### - guardian contains DBLOCK_EDIT_UPDATE INFO lib389:ticket48906_test.py:202 ### INFO lib389:ticket48906_test.py:203 ################################### | |||
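Editor's annotation: the assert 'on' == '20000' strongly suggests the substring test (DBLOCK_ATTR_CONFIG in line.lower()) matched a longer attribute whose value is 'on'; recent dse.ldif files carry, for instance, nsslapd-db-locks-monitoring-enabled next to nsslapd-db-locks. A sketch of a stricter lookup, under the assumption that such a prefix collision is the cause:

    def read_dse_value(dse_ldif, attr):
        # Match the attribute name only at the start of the line, so
        # 'nsslapd-db-locks' cannot hit 'nsslapd-db-locks-monitoring-enabled'.
        needle = attr.lower() + ':'
        with open(dse_ldif) as f:
            for line in f:
                if line.lower().startswith(needle):
                    return line.split(':', 1)[1].strip()
        return None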
Failed | tickets/ticket48906_test.py::test_ticket48906_dblock_robust | 3.66 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999447fa0> def test_ticket48906_dblock_robust(topology_st): topology_st.standalone.log.info('###################################') topology_st.standalone.log.info('###') topology_st.standalone.log.info('### Check that the following values are rejected') topology_st.standalone.log.info('### - negative value') topology_st.standalone.log.info('### - insufficient value') topology_st.standalone.log.info('### - invalid value') topology_st.standalone.log.info('### Check that minimum value is accepted') topology_st.standalone.log.info('###') topology_st.standalone.log.info('###################################') topology_st.standalone.start(timeout=10) > _check_monitored_value(topology_st, DBLOCK_EDIT_UPDATE) /export/tests/tickets/ticket48906_test.py:256: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff999447fa0> expected_value = '40000' def _check_monitored_value(topology_st, expected_value): entries = topology_st.standalone.search_s(ldbm_monitor, ldap.SCOPE_BASE, '(objectclass=*)') > assert (entries[0].hasValue(DBLOCK_ATTR_MONITOR) and entries[0].getValue(DBLOCK_ATTR_MONITOR) == ensure_bytes(expected_value)) E AssertionError: assert (True and b'20000' == b'40000' E + where True = <bound method Entry.hasValue of dn: cn=database,cn=monitor,cn=ldbm database,cn=plugins,cn=config\ncn: database\nnsslapd-...ate: 0\nnsslapd-db-pages-in-use: 11\nnsslapd-db-txn-region-wait-rate: 1\nobjectClass: top\nobjectClass: extensibleObject\n\n>('nsslapd-db-configured-locks') E + where <bound method Entry.hasValue of dn: cn=database,cn=monitor,cn=ldbm database,cn=plugins,cn=config\ncn: database\nnsslapd-...ate: 0\nnsslapd-db-pages-in-use: 11\nnsslapd-db-txn-region-wait-rate: 1\nobjectClass: top\nobjectClass: extensibleObject\n\n> = dn: cn=database,cn=monitor,cn=ldbm database,cn=plugins,cn=config\ncn: database\nnsslapd-db-abort-rate: 0\nnsslapd-db-acti...rate: 0\nnsslapd-db-pages-in-use: 11\nnsslapd-db-txn-region-wait-rate: 1\nobjectClass: top\nobjectClass: extensibleObject\n\n.hasValue E At index 0 diff: b'2' != b'4' E Full diff: E - b'40000' E ? ^ E + b'20000' E ? ^) /export/tests/tickets/ticket48906_test.py:105: AssertionError -------------------------------Captured log call-------------------------------- INFO lib389:ticket48906_test.py:245 ################################### INFO lib389:ticket48906_test.py:246 ### INFO lib389:ticket48906_test.py:247 ### Check that the following values are rejected INFO lib389:ticket48906_test.py:248 ### - negative value INFO lib389:ticket48906_test.py:249 ### - insufficient value INFO lib389:ticket48906_test.py:250 ### - invalid value INFO lib389:ticket48906_test.py:251 ### Check that minimum value is accepted INFO lib389:ticket48906_test.py:252 ### INFO lib389:ticket48906_test.py:253 ################################### | | | |
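Editor's annotation: this looks like fallout from the previous failure rather than a separate robustness bug; the monitor still reports b'20000' (the DBLOCK_LDAP_UPDATE value) because the dse.ldif edit to 40000 never took effect. For reference, the check itself reduces to a read of the ldbm monitor entry; ldbm_monitor and DBLOCK_ATTR_MONITOR are the test's own names:

    entry = topology_st.standalone.search_s(ldbm_monitor, ldap.SCOPE_BASE,
                                            '(objectclass=*)')[0]
    configured = entry.getValue(DBLOCK_ATTR_MONITOR)  # b'20000' in this run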
Failed | tickets/ticket48916_test.py::test_ticket48916 | 75.78 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99986a9d0> def test_ticket48916(topology_m2): """ https://bugzilla.redhat.com/show_bug.cgi?id=1353629 This is an issue with ID exhaustion in DNA causing a crash. To access each DirSrv instance use: topology_m2.ms["supplier1"], topology_m2.ms["supplier2"], ..., topology_m2.hub1, ..., topology_m2.consumer1,... """ if DEBUGGING: # Add debugging steps(if any)... pass # Enable the plugin on both servers dna_m1 = topology_m2.ms["supplier1"].plugins.get('Distributed Numeric Assignment Plugin') dna_m2 = topology_m2.ms["supplier2"].plugins.get('Distributed Numeric Assignment Plugin') # Configure it # Create the container for the ranges to go into. topology_m2.ms["supplier1"].add_s(Entry( ('ou=Ranges,%s' % DEFAULT_SUFFIX, { 'objectClass': 'top organizationalUnit'.split(' '), 'ou': 'Ranges', }) )) # Create the dnaAdmin? # For now we just pinch the dn from the dna_m* types, and add the relevant child config # but in the future, this could be a better plugin template type from lib389 config_dn = dna_m1.dn topology_m2.ms["supplier1"].add_s(Entry( ('cn=uids,%s' % config_dn, { 'objectClass': 'top dnaPluginConfig'.split(' '), 'cn': 'uids', 'dnatype': 'uidNumber gidNumber'.split(' '), 'dnafilter': '(objectclass=posixAccount)', 'dnascope': '%s' % DEFAULT_SUFFIX, 'dnaNextValue': '1', 'dnaMaxValue': '50', 'dnasharedcfgdn': 'ou=Ranges,%s' % DEFAULT_SUFFIX, 'dnaThreshold': '0', 'dnaRangeRequestTimeout': '60', 'dnaMagicRegen': '-1', 'dnaRemoteBindDN': 'uid=dnaAdmin,ou=People,%s' % DEFAULT_SUFFIX, 'dnaRemoteBindCred': 'secret123', 'dnaNextRange': '80-90' }) )) topology_m2.ms["supplier2"].add_s(Entry( ('cn=uids,%s' % config_dn, { 'objectClass': 'top dnaPluginConfig'.split(' '), 'cn': 'uids', 'dnatype': 'uidNumber gidNumber'.split(' '), 'dnafilter': '(objectclass=posixAccount)', 'dnascope': '%s' % DEFAULT_SUFFIX, 'dnaNextValue': '61', 'dnaMaxValue': '70', 'dnasharedcfgdn': 'ou=Ranges,%s' % DEFAULT_SUFFIX, 'dnaThreshold': '2', 'dnaRangeRequestTimeout': '60', 'dnaMagicRegen': '-1', 'dnaRemoteBindDN': 'uid=dnaAdmin,ou=People,%s' % DEFAULT_SUFFIX, 'dnaRemoteBindCred': 'secret123', }) )) # Enable the plugins dna_m1.enable() dna_m2.enable() # Restart the instances topology_m2.ms["supplier1"].restart(60) topology_m2.ms["supplier2"].restart(60) # Wait for a replication ..... 
time.sleep(40) # Allocate the 10 members to exhaust for i in range(1, 11): _create_user(topology_m2.ms["supplier2"], i) # Allocate the 11th > _create_user(topology_m2.ms["supplier2"], 11) /export/tests/tickets/ticket48916_test.py:126: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48916_test.py:21: in _create_user inst.add_s(Entry( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:439: in add_s return self.add_ext_s(dn,modlist,None,None) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:171: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9997a3250> func = <built-in method result4 of LDAP object at 0x7ff99938e3f0> args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.OPERATIONS_ERROR: {'msgtype': 105, 'msgid': 13, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Allocation of a new value for range cn=uids,cn=distributed numeric assignment plugin,cn=plugins,cn=config failed! Unable to proceed.'} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: OPERATIONS_ERROR -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bbee192e-5c6c-40fd-a86a-6575304ba084 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 046e510e-8307-408e-86df-c0d3b7691a91 / got description=bbee192e-5c6c-40fd-a86a-6575304ba084) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
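Editor's annotation: exhausting supplier2's 61-70 range and then asking for an 11th value was meant to exercise the ticket's fix (drawing on dnaNextRange or a peer's range) but instead produced OPERATIONS_ERROR: allocation failed. A hedged diagnostic for a failure like this reads the DNA config back after exhaustion; the DN is taken from the error message, the attribute names from the setup above, and dump_dna_range itself is illustrative:

    import ldap

    DNA_CONFIG = ('cn=uids,cn=distributed numeric assignment plugin,'
                  'cn=plugins,cn=config')

    def dump_dna_range(inst):
        # inst is a bound lib389 DirSrv, e.g. topology_m2.ms['supplier2'];
        # shows whether the 61-70 range was consumed and whether any
        # dnaNextRange was ever transferred in.
        cfg = inst.search_s(DNA_CONFIG, ldap.SCOPE_BASE, '(objectclass=*)',
                            ['dnaNextValue', 'dnaMaxValue', 'dnaNextRange'])
        return cfg[0]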
Failed | tickets/ticket48944_test.py::test_ticket48944 | 100.69 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff999813520> def test_ticket48944(topo): """On a read only replica invalid state info can accumulate :id: 833be131-f3bf-493e-97c6-3121438a07b1 :feature: Account Policy Plugin :setup: Two supplier and two consumer setup :steps: 1. Configure Account policy plugin with alwaysrecordlogin set to yes 2. Check if entries are synced across suppliers and consumers 3. Stop all suppliers and consumers 4. Start supplier1 and bind as user1 to create lastLoginTime attribute 5. Start supplier2 and wait for the sync of lastLoginTime attribute 6. Stop supplier1 and bind as user1 from supplier2 7. Check if lastLoginTime attribute is updated and greater than supplier1 8. Stop supplier2, start consumer1, consumer2 and then supplier2 9. Check if lastLoginTime attribute is updated on both consumers 10. Bind as user1 to both consumers and check the value is updated 11. Check if lastLoginTime attribute is not updated from consumers 12. Start supplier1 and make sure the lastLoginTime attribute is not updated on consumers 13. Bind as user1 from supplier1 and check if all suppliers and consumers have the same value 14. Check error logs of consumers for "deletedattribute;deleted" message :expectedresults: No accumulation of replica invalid state info on consumers """ log.info("Ticket 48944 - On a read only replica invalid state info can accumulate") user_name = 'newbzusr' tuserdn = 'uid={}1,ou=people,{}'.format(user_name, SUFFIX) inst_list = ['supplier1', 'supplier2', 'consumer1', 'consumer2'] for inst_name in inst_list: _enable_plugin(topo, inst_name) log.info('Sleep for 10secs for the server to come up') time.sleep(10) log.info('Add few entries to server and check if entries are replicated') for nos in range(10): userdn = 'uid={}{},ou=people,{}'.format(user_name, nos, SUFFIX) try: topo.ms['supplier1'].add_s(Entry((userdn, { 'objectclass': 'top person'.split(), 'objectclass': 'inetorgperson', 'cn': user_name, 'sn': user_name, 'userpassword': USER_PW, 'mail': '{}@redhat.com'.format(user_name)}))) except ldap.LDAPError as e: log.error('Failed to add {} user: error {}'.format(userdn, e.message['desc'])) raise e log.info('Checking if entries are synced across suppliers and consumers') entries_m1 = topo.ms['supplier1'].search_s(SUFFIX, ldap.SCOPE_SUBTREE, 'uid={}*'.format(user_name), ['uid=*']) exp_entries = str(entries_m1).count('dn: uid={}*'.format(user_name)) entries_m2 = topo.ms['supplier2'].search_s(SUFFIX, ldap.SCOPE_SUBTREE, 'uid={}*'.format(user_name), ['uid=*']) act_entries = str(entries_m2).count('dn: uid={}*'.format(user_name)) assert act_entries == exp_entries inst_list = ['consumer1', 'consumer2'] for inst in inst_list: entries_other = topo.cs[inst].search_s(SUFFIX, ldap.SCOPE_SUBTREE, 'uid={}*'.format(user_name), ['uid=*']) act_entries = str(entries_other).count('dn: uid={}*'.format(user_name)) assert act_entries == exp_entries topo.ms['supplier2'].stop(timeout=10) topo.ms['supplier1'].stop(timeout=10) topo.cs['consumer1'].stop(timeout=10) topo.cs['consumer2'].stop(timeout=10) topo.ms['supplier1'].start(timeout=10) lastLogin_m1_1 = _last_login_time(topo, tuserdn, 'supplier1', 'bind_n_check') log.info('Start supplier2 to sync lastLoginTime attribute from supplier1') topo.ms['supplier2'].start(timeout=10) time.sleep(5) log.info('Stop supplier1') topo.ms['supplier1'].stop(timeout=10) log.info('Bind as user1 to supplier2 and check if lastLoginTime attribute is greater than supplier1') > lastLogin_m2_1 = _last_login_time(topo, tuserdn, 'supplier2', 
'bind_n_check') /export/tests/tickets/ticket48944_test.py:158: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48944_test.py:36: in _last_login_time topo.ms[inst_name].simple_bind_s(userdn, USER_PW) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:455: in simple_bind_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99b9d0640> func = <built-in method result4 of LDAP object at 0x7ff99b9b7f00> args = (3, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INVALID_CREDENTIALS: {'msgtype': 97, 'msgid': 3, 'result': 49, 'desc': 'Invalid credentials', 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INVALID_CREDENTIALS -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for consumer2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d42f62bc-c0f7-4605-940b-10da6657f38c / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c72d337a-7440-41fa-a2e2-d669e7879b11 / got description=d42f62bc-c0f7-4605-940b-10da6657f38c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 9327dbf4-b537-431a-adfe-2f3438ef40ed / got description=c72d337a-7440-41fa-a2e2-d669e7879b11) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:169 Joining consumer consumer2 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect a844c930-00f7-4cd1-948a-7652b071e350 / got description=9327dbf4-b537-431a-adfe-2f3438ef40ed) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 already exists INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is was created -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48944_test:ticket48944_test.py:108 Ticket 48944 - On a read only replica invalid state info can accumulate INFO tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO tests.tickets.ticket48944_test:ticket48944_test.py:54 Configure Account policy plugin on supplier1 INFO tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO tests.tickets.ticket48944_test:ticket48944_test.py:54 Configure Account policy plugin on supplier2 INFO tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO tests.tickets.ticket48944_test:ticket48944_test.py:69 Configure Account policy plugin on consumer1 INFO tests.tickets.ticket48944_test:ticket48944_test.py:52 Enable account policy plugin and configure required attributes INFO tests.tickets.ticket48944_test:ticket48944_test.py:69 Configure Account policy plugin on consumer2 INFO tests.tickets.ticket48944_test:ticket48944_test.py:115 Sleep for 10secs for the server to come up INFO tests.tickets.ticket48944_test:ticket48944_test.py:117 Add few entries to server and check if entries are replicated INFO tests.tickets.ticket48944_test:ticket48944_test.py:132 Checking if entries are synced across suppliers and consumers INFO tests.tickets.ticket48944_test:ticket48944_test.py:152 Start supplier2 to sync lastLoginTime attribute from supplier1 INFO tests.tickets.ticket48944_test:ticket48944_test.py:155 Stop supplier1 INFO tests.tickets.ticket48944_test:ticket48944_test.py:157 Bind as user1 to supplier2 and check if lastLoginTime attribute is greater than supplier1 | |||
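Editor's annotation: before chasing the INVALID_CREDENTIALS into replication, note a latent bug in the user-creation loop shown above: the dict literal passes 'objectclass' twice, and Python keeps only the last value ('inetorgperson'), silently dropping 'top person'. The error handler's e.message['desc'] is likewise a python-ldap 2.x idiom; on Python 3 the detail lives in e.args[0]['desc']. A corrected add, merging the two values the test supplies (names follow the test fixture):

    userdn = 'uid={}{},ou=people,{}'.format(user_name, nos, SUFFIX)
    topo.ms['supplier1'].add_s(Entry((userdn, {
        # one key carrying all classes; a duplicate dict key is not an
        # error in Python, it just discards the earlier value
        'objectclass': ['top', 'person', 'inetorgperson'],
        'cn': user_name,
        'sn': user_name,
        'userpassword': USER_PW,
        'mail': '{}@redhat.com'.format(user_name)})))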
Failed | tickets/ticket48956_test.py::test_ticket48956 | 15.47 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999577bb0> def test_ticket48956(topology_st): """Write your testcase here... Also, if you need any testcase initialization, please, write additional fixture for that(include finalizer). """ topology_st.standalone.modify_s(ACCT_POLICY_PLUGIN_DN, [(ldap.MOD_REPLACE, 'nsslapd-pluginarg0', ensure_bytes(ACCT_POLICY_CONFIG_DN))]) topology_st.standalone.modify_s(ACCT_POLICY_CONFIG_DN, [(ldap.MOD_REPLACE, 'alwaysrecordlogin', b'yes'), (ldap.MOD_REPLACE, 'stateattrname', b'lastLoginTime'), (ldap.MOD_REPLACE, 'altstateattrname', b'createTimestamp'), (ldap.MOD_REPLACE, 'specattrname', b'acctPolicySubentry'), (ldap.MOD_REPLACE, 'limitattrname', b'accountInactivityLimit')]) # Enable the plugins topology_st.standalone.plugins.enable(name=PLUGIN_ACCT_POLICY) topology_st.standalone.restart(timeout=10) # Check inactivity on standard suffix (short) > _check_inactivity(topology_st, SUFFIX) /export/tests/tickets/ticket48956_test.py:107: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket48956_test.py:78: in _check_inactivity assert (_check_status(topology_st, TEST_USER_DN, b'- activated')) /export/tests/tickets/ticket48956_test.py:39: in _check_status output = subprocess.check_output([nsaccountstatus, '-Z', topology_st.standalone.serverid, /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, /usr/lib64/python3.9/subprocess.py:505: in run with Popen(*popenargs, **kwargs) as process: /usr/lib64/python3.9/subprocess.py:951: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <Popen: returncode: 255 args: ['/usr/sbin/ns-accountstatus.pl', '-Z', 'stand...> args = ['/usr/sbin/ns-accountstatus.pl', '-Z', 'standalone1', '-D', 'cn=Directory Manager', '-w', ...] executable = b'/usr/sbin/ns-accountstatus.pl', preexec_fn = None close_fds = True, pass_fds = (), cwd = None, env = None, startupinfo = None creationflags = 0, shell = False, p2cread = -1, p2cwrite = -1, c2pread = 136 c2pwrite = 137, errread = -1, errwrite = -1, restore_signals = True, gid = None gids = None, uid = None, umask = -1, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. 
unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session and gid is None and gids is None and uid is None and umask < 0): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, gid, gids, uid, umask, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). 
err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/ns-accountstatus.pl' /usr/lib64/python3.9/subprocess.py:1821: FileNotFoundError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48956_test:ticket48956_test.py:54 ######################### Adding Account Policy entry: cn=Account Inactivation Policy,dc=example,dc=com ###################### INFO tests.tickets.ticket48956_test:ticket48956_test.py:61 ######################### Adding Test User entry: uid=ticket48956user,dc=example,dc=com ###################### | |||
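The failure is environmental rather than logical: this 389-ds build ships without the legacy Perl tools, so /usr/sbin/ns-accountstatus.pl cannot be executed. A minimal sketch of a guard that would let the test skip cleanly instead of dying inside Popen; the skip itself is an assumed remediation, not the test author's fix, and it reuses the same get_sbin_dir() helper other tickets in this report call:

    import os
    import pytest

    # Probe for the legacy Perl helper before _check_status() shells out to it.
    nsaccountstatus = os.path.join(topology_st.standalone.get_sbin_dir(),
                                   'ns-accountstatus.pl')
    if not os.path.exists(nsaccountstatus):
        pytest.skip('legacy Perl tools (ns-accountstatus.pl) are not installed on this build')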
Failed | tickets/ticket49039_test.py::test_ticket49039 | 17.63 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff999837670> def test_ticket49039(topo): """Test "password must change" verses "password min age". Min age should not block password update if the password was reset. """ # Setup SSL (for ldappasswd test) topo.standalone.enable_tls() # Configure password policy try: policy = PwPolicyManager(topo.standalone) policy.set_global_policy(properties={'nsslapd-pwpolicy-local': 'on', 'passwordMustChange': 'on', 'passwordExp': 'on', 'passwordMaxAge': '86400000', 'passwordMinAge': '8640000', 'passwordChange': 'on'}) except ldap.LDAPError as e: log.fatal('Failed to set password policy: ' + str(e)) # Add user, bind, and set password try: topo.standalone.add_s(Entry((USER_DN, { 'objectclass': 'top extensibleObject'.split(), 'uid': 'user1', 'userpassword': PASSWORD }))) except ldap.LDAPError as e: log.fatal('Failed to add user: error ' + e.args[0]['desc']) assert False # Reset password as RootDN try: topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False time.sleep(1) # Reset password as user try: topo.standalone.simple_bind_s(USER_DN, PASSWORD) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False try: > topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) /export/tests/tickets/ticket49039_test.py:75: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user,dc=example,dc=com', [(2, 'userpassword', b'password')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x5628ddd96070, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 173,...93, function='_hookexec', code_context=[' return self._inner_hookexec(hook, methods, kwargs)\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x5628ddda58a0, file '/export/tests/tickets/ticket49039_test.py', line 78, code test_ticket4...[" topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999673e80> dn = 'uid=user,dc=example,dc=com', modlist = [(2, 'userpassword', b'password')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user,dc=example,dc=com', [(2, 'userpassword', b'password')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999673e80> dn = 'uid=user,dc=example,dc=com', modlist = [(2, 'userpassword', b'password')] serverctrls = None, clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (7,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999673e80>, msgid = 7, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (7, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999673e80>, msgid = 7, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff9996750c0>, 7, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999673e80> func = <built-in method result4 of LDAP object at 0x7ff9996750c0> args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 7, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7ff9998b22c0> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999673e80> func = <built-in method result4 of LDAP object at 0x7ff9996750c0> args = (7, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 7, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topo = <lib389.topologies.TopologyMain object at 0x7ff999837670> def test_ticket49039(topo): """Test "password must change" verses "password min age". Min age should not block password update if the password was reset. """ # Setup SSL (for ldappasswd test) topo.standalone.enable_tls() # Configure password policy try: policy = PwPolicyManager(topo.standalone) policy.set_global_policy(properties={'nsslapd-pwpolicy-local': 'on', 'passwordMustChange': 'on', 'passwordExp': 'on', 'passwordMaxAge': '86400000', 'passwordMinAge': '8640000', 'passwordChange': 'on'}) except ldap.LDAPError as e: log.fatal('Failed to set password policy: ' + str(e)) # Add user, bind, and set password try: topo.standalone.add_s(Entry((USER_DN, { 'objectclass': 'top extensibleObject'.split(), 'uid': 'user1', 'userpassword': PASSWORD }))) except ldap.LDAPError as e: log.fatal('Failed to add user: error ' + e.args[0]['desc']) assert False # Reset password as RootDN try: topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False time.sleep(1) # Reset password as user try: topo.standalone.simple_bind_s(USER_DN, PASSWORD) except ldap.LDAPError as e: log.fatal('Failed to bind: error ' + e.args[0]['desc']) assert False try: topo.standalone.modify_s(USER_DN, [(ldap.MOD_REPLACE, 'userpassword', ensure_bytes(PASSWORD))]) except ldap.LDAPError as e: log.fatal('Failed to change password: error ' + e.args[0]['desc']) > assert False E assert False /export/tests/tickets/ticket49039_test.py:78: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket49039_test:ticket49039_test.py:77 Failed to change password: error Insufficient access | |||
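Here the server, not the harness, rejects the operation: the user's own replace of userPassword is denied with INSUFFICIENT_ACCESS even though passwordMustChange is on, so the except branch logs the error and hits assert False. The traceback is also full of lib389's DeprecationWarning wrapper around raw modify_s; a minimal sketch of the same self-service change through the lib389 Account API (Account.bind() returning a per-user connection is an assumption about the lib389 API, and the rewrite only removes the deprecated call, it does not by itself cure the access denial):

    from lib389.idm.user import UserAccount

    user = UserAccount(topo.standalone, USER_DN)
    user_conn = user.bind(PASSWORD)          # bind as the user itself (assumed API)
    UserAccount(user_conn, USER_DN).replace('userPassword', PASSWORD)
    user_conn.unbind_s()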
Failed | tickets/ticket49072_test.py::test_ticket49072_basedn | 12.35 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff999add0d0> def test_ticket49072_basedn(topo): """memberOf fixup task does not validate args :id: dce9b898-119d-42b8-a236-1130e59bfe18 :feature: memberOf :setup: Standalone instance, with memberOf plugin :steps: 1. Run fixup-memberOf.pl with invalid DN entry 2. Check if error log reports "Failed to get be backend" :expectedresults: Fixup-memberOf.pl task should complete, but errors logged. """ log.info("Ticket 49072 memberof fixup task with invalid basedn...") topo.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topo.standalone.restart(timeout=10) if ds_is_older('1.3'): inst_dir = topo.standalone.get_inst_dir() memof_task = os.path.join(inst_dir, FIXUP_MEMOF) try: output = subprocess.check_output([memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', TEST_BASEDN, '-f', FILTER]) except subprocess.CalledProcessError as err: output = err.output else: sbin_dir = topo.standalone.get_sbin_dir() memof_task = os.path.join(sbin_dir, FIXUP_MEMOF) try: > output = subprocess.check_output( [memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', TEST_BASEDN, '-Z', SERVERID_STANDALONE, '-f', FILTER]) /export/tests/tickets/ticket49072_test.py:55: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, /usr/lib64/python3.9/subprocess.py:505: in run with Popen(*popenargs, **kwargs) as process: /usr/lib64/python3.9/subprocess.py:951: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <Popen: returncode: 255 args: ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Dire...> args = ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Directory Manager', '-w', 'password', '-b', ...] executable = b'/usr/sbin/fixup-memberof.pl', preexec_fn = None, close_fds = True pass_fds = (), cwd = None, env = None, startupinfo = None, creationflags = 0 shell = False, p2cread = -1, p2cwrite = -1, c2pread = 137, c2pwrite = 138 errread = -1, errwrite = -1, restore_signals = True, gid = None, gids = None uid = None, umask = -1, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. 
unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session and gid is None and gids is None and uid is None and umask < 0): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, gid, gids, uid, umask, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). 
err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/fixup-memberof.pl' /usr/lib64/python3.9/subprocess.py:1821: FileNotFoundError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49072_test:ticket49072_test.py:40 Ticket 49072 memberof fixup task with invalid basedn... | |||
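As with ticket48956, the Perl wrapper is simply absent on this build (/usr/sbin/fixup-memberof.pl). The same fixup can be driven through the cn=tasks interface; a minimal sketch, assuming lib389's MemberOfFixupTask class and its 'basedn'/'filter' task properties:

    from lib389.tasks import MemberOfFixupTask

    # Submit the fixup task directly instead of shelling out to fixup-memberof.pl.
    task = MemberOfFixupTask(topo.standalone).create(properties={
        'basedn': TEST_BASEDN,   # the deliberately invalid base DN under test
        'filter': FILTER,
    })
    task.wait()
    # The ticket expects completion plus a "Failed to get be backend" error-log entry.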
Failed | tickets/ticket49072_test.py::test_ticket49072_filter | 12.29 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff999add0d0> def test_ticket49072_filter(topo): """memberOf fixup task does not validate args :id: dde9e893-119d-42c8-a236-1190e56bfe98 :feature: memberOf :setup: Standalone instance, with memberOf plugin :steps: 1. Run fixup-memberOf.pl with invalid filter 2. Check if error log reports "Bad search filter" :expectedresults: Fixup-memberOf.pl task should complete, but errors logged. """ log.info("Ticket 49072 memberof fixup task with invalid filter...") log.info('Wait for 10 secs and check if task is completed') time.sleep(10) task_memof = 'cn=memberOf task,cn=tasks,cn=config' if topo.standalone.search_s(task_memof, ldap.SCOPE_SUBTREE, 'cn=memberOf_fixup*', ['dn:']): log.info('memberof task is still running, wait for +10 secs') time.sleep(10) if ds_is_older('1.3'): inst_dir = topo.standalone.get_inst_dir() memof_task = os.path.join(inst_dir, FIXUP_MEMOF) try: output = subprocess.check_output([memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', SUFFIX, '-f', TEST_FILTER]) except subprocess.CalledProcessError as err: output = err.output else: sbin_dir = topo.standalone.get_sbin_dir() memof_task = os.path.join(sbin_dir, FIXUP_MEMOF) try: > output = subprocess.check_output( [memof_task, '-D', DN_DM, '-w', PASSWORD, '-b', SUFFIX, '-Z', SERVERID_STANDALONE, '-f', TEST_FILTER]) /export/tests/tickets/ticket49072_test.py:96: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.9/subprocess.py:424: in check_output return run(*popenargs, stdout=PIPE, timeout=timeout, check=True, /usr/lib64/python3.9/subprocess.py:505: in run with Popen(*popenargs, **kwargs) as process: /usr/lib64/python3.9/subprocess.py:951: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <Popen: returncode: 255 args: ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Dire...> args = ['/usr/sbin/fixup-memberof.pl', '-D', 'cn=Directory Manager', '-w', 'password', '-b', ...] executable = b'/usr/sbin/fixup-memberof.pl', preexec_fn = None, close_fds = True pass_fds = (), cwd = None, env = None, startupinfo = None, creationflags = 0 shell = False, p2cread = -1, p2cwrite = -1, c2pread = 137, c2pwrite = 138 errread = -1, errwrite = -1, restore_signals = True, gid = None, gids = None uid = None, umask = -1, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. 
unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session and gid is None and gids is None and uid is None and umask < 0): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, gid, gids, uid, umask, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). 
err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: '/usr/sbin/fixup-memberof.pl' /usr/lib64/python3.9/subprocess.py:1821: FileNotFoundError -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49072_test:ticket49072_test.py:77 Ticket 49072 memberof fixup task with invalid filter... INFO tests.tickets.ticket49072_test:ticket49072_test.py:78 Wait for 10 secs and check if task is completed | |||
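The filter variant trips over the same missing binary and maps onto the same task interface; a minimal sketch mirroring the one above, again assuming MemberOfFixupTask:

    task = MemberOfFixupTask(topo.standalone).create(properties={
        'basedn': SUFFIX,
        'filter': TEST_FILTER,   # expected to yield "Bad search filter" in the error log
    })
    task.wait()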
Failed | tickets/ticket49073_test.py::test_ticket49073 | 37.84 | |
topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff9995646d0> def test_ticket49073(topology_m2): """Write your replication test here. To access each DirSrv instance use: topology_m2.ms["supplier1"], topology_m2.ms["supplier2"], ..., topology_m2.hub1, ..., topology_m2.consumer1,... Also, if you need any testcase initialization, please, write additional fixture for that(include finalizer). """ topology_m2.ms["supplier1"].plugins.enable(name=PLUGIN_MEMBER_OF) topology_m2.ms["supplier1"].restart(timeout=10) topology_m2.ms["supplier2"].plugins.enable(name=PLUGIN_MEMBER_OF) topology_m2.ms["supplier2"].restart(timeout=10) # Configure fractional to prevent total init to send memberof ents = topology_m2.ms["supplier1"].agreement.list(suffix=SUFFIX) assert len(ents) == 1 log.info('update %s to add nsDS5ReplicatedAttributeListTotal' % ents[0].dn) > topology_m2.ms["supplier1"].modify_s(ents[0].dn, [(ldap.MOD_REPLACE, 'nsDS5ReplicatedAttributeListTotal', '(objectclass=*) $ EXCLUDE '), (ldap.MOD_REPLACE, 'nsDS5ReplicatedAttributeList', '(objectclass=*) $ EXCLUDE memberOf')]) /export/tests/tickets/ticket49073_test.py:97: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:612: in modify_ext_s msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:609: in modify_ext return self._ldap_call(self._l.modify_ext,dn,modlist,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls)) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999564e20> func = <built-in method modify_ext of LDAP object at 0x7ff999523240> args = ('cn=002,cn=replica,cn=dc\\3Dexample\\2Cdc\\3Dcom,cn=mapping tree,cn=config', [(2, 'nsDS5ReplicatedAttributeListTotal', '(objectclass=*) $ EXCLUDE '), (2, 'nsDS5ReplicatedAttributeList', '(objectclass=*) $ EXCLUDE memberOf')], None, None) kwargs = {}, diagnostic_message_success = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E TypeError: ('Tuple_to_LDAPMod(): expected a byte string in the list', '(') /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: TypeError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... 
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 36b38b4f-fa21-4fdb-b670-be8872820953 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 99139116-1d41-497d-9111-c605aaab5a0f / got description=36b38b4f-fa21-4fdb-b670-be8872820953) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49073_test:ticket49073_test.py:96 update cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal | |||
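This one is a straightforward python-ldap 3.x type error: Tuple_to_LDAPMod() requires bytes values in a modlist, and the test passes str. A minimal sketch of the corrected call, using the same ensure_bytes helper these tests already import elsewhere:

    from lib389.utils import ensure_bytes

    # Encode the fractional-replication attribute lists before modify_s().
    topology_m2.ms["supplier1"].modify_s(ents[0].dn, [
        (ldap.MOD_REPLACE, 'nsDS5ReplicatedAttributeListTotal',
         ensure_bytes('(objectclass=*) $ EXCLUDE ')),
        (ldap.MOD_REPLACE, 'nsDS5ReplicatedAttributeList',
         ensure_bytes('(objectclass=*) $ EXCLUDE memberOf')),
    ])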
Failed | tickets/ticket49104_test.py::test_ticket49104_setup | 7.96 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999ace8e0> def test_ticket49104_setup(topology_st): """ Generate an ldif file having 10K entries and import it. """ # Generate a test ldif (100k entries) ldif_dir = topology_st.standalone.get_ldif_dir() import_ldif = ldif_dir + '/49104.ldif' try: > topology_st.standalone.buildLDIF(100000, import_ldif) /export/tests/tickets/ticket49104_test.py:30: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff999ace400>, num = 100000 ldif_file = '/var/lib/dirsrv/slapd-standalone1/ldif/49104.ldif' suffix = 'dc=example,dc=com' def buildLDIF(self, num, ldif_file, suffix='dc=example,dc=com'): """Generate a simple ldif file using the dbgen.pl script, and set the ownership and permissions to match the user that the server runs as. @param num - number of entries to create @param ldif_file - ldif file name(including the path) @suffix - DN of the parent entry in the ldif file @return - nothing @raise - OSError """ > raise Exception("Perl tools disabled on this system. Try dbgen py module.") E Exception: Perl tools disabled on this system. Try dbgen py module. /usr/local/lib/python3.9/site-packages/lib389/__init__.py:3236: Exception -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
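The exception message names the replacement itself: lib389's Python dbgen module. A minimal sketch of generating the same LDIF with it; the dbgen_users(instance, number, ldif_file, suffix) signature is an assumption about the lib389 API:

    from lib389.dbgen import dbgen_users

    # Build the 100k-entry LDIF without the disabled dbgen.pl.
    dbgen_users(topology_st.standalone, 100000, import_ldif, 'dc=example,dc=com')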
Failed | tickets/ticket49104_test.py::test_ticket49104 | 2.22 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff999ace8e0> def test_ticket49104(topology_st): """ Run dbscan with valgrind changing the truncate size. If there is no Invalid report, we can claim the test has passed. """ log.info("Test ticket 49104 -- dbscan crashes by memory corruption") myvallog = '/tmp/val49104.out' if os.path.exists(myvallog): os.remove(myvallog) prog = os.path.join(topology_st.standalone.get_bin_dir(), 'dbscan-bin') valcmd = 'valgrind --tool=memcheck --leak-check=yes --num-callers=40 --log-file=%s ' % myvallog if topology_st.standalone.has_asan(): valcmd = '' id2entry = os.path.join(topology_st.standalone.dbdir, DEFAULT_BENAME, 'id2entry.db') for i in range(20, 30): cmd = valcmd + '%s -f %s -t %d -R' % (prog, id2entry , i) log.info('Running script: %s' % cmd) > proc = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) /export/tests/tickets/ticket49104_test.py:63: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/lib64/python3.9/subprocess.py:951: in __init__ self._execute_child(args, executable, preexec_fn, close_fds, _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <Popen: returncode: 255 args: ['valgrind', '--tool=memcheck', '--leak-check=...> args = ['valgrind', '--tool=memcheck', '--leak-check=yes', '--num-callers=40', '--log-file=/tmp/val49104.out', '/usr/bin/dbscan-bin', ...] executable = b'valgrind', preexec_fn = None, close_fds = True, pass_fds = () cwd = None, env = None, startupinfo = None, creationflags = 0, shell = False p2cread = -1, p2cwrite = -1, c2pread = 145, c2pwrite = 146, errread = -1 errwrite = -1, restore_signals = True, gid = None, gids = None, uid = None umask = -1, start_new_session = False def _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session): """Execute program (POSIX version)""" if isinstance(args, (str, bytes)): args = [args] elif isinstance(args, os.PathLike): if shell: raise TypeError('path-like args is not allowed when ' 'shell is true') args = [args] else: args = list(args) if shell: # On Android the default shell is at '/system/bin/sh'. unix_shell = ('/system/bin/sh' if hasattr(sys, 'getandroidapilevel') else '/bin/sh') args = [unix_shell, "-c"] + args if executable: args[0] = executable if executable is None: executable = args[0] sys.audit("subprocess.Popen", executable, args, cwd, env) if (_USE_POSIX_SPAWN and os.path.dirname(executable) and preexec_fn is None and not close_fds and not pass_fds and cwd is None and (p2cread == -1 or p2cread > 2) and (c2pwrite == -1 or c2pwrite > 2) and (errwrite == -1 or errwrite > 2) and not start_new_session and gid is None and gids is None and uid is None and umask < 0): self._posix_spawn(args, executable, env, restore_signals, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) return orig_executable = executable # For transferring possible exec failure from child to parent. # Data format: "exception name:hex errno:description" # Pickle is not used; it is complex and involves memory allocation. errpipe_read, errpipe_write = os.pipe() # errpipe_write must not be in the standard io 0, 1, or 2 fd range. 
low_fds_to_close = [] while errpipe_write < 3: low_fds_to_close.append(errpipe_write) errpipe_write = os.dup(errpipe_write) for low_fd in low_fds_to_close: os.close(low_fd) try: try: # We must avoid complex work that could involve # malloc or free in the child process to avoid # potential deadlocks, thus we do all this here. # and pass it to fork_exec() if env is not None: env_list = [] for k, v in env.items(): k = os.fsencode(k) if b'=' in k: raise ValueError("illegal environment variable name") env_list.append(k + b'=' + os.fsencode(v)) else: env_list = None # Use execv instead of execve. executable = os.fsencode(executable) if os.path.dirname(executable): executable_list = (executable,) else: # This matches the behavior of os._execvpe(). executable_list = tuple( os.path.join(os.fsencode(dir), executable) for dir in os.get_exec_path(env)) fds_to_keep = set(pass_fds) fds_to_keep.add(errpipe_write) self.pid = _posixsubprocess.fork_exec( args, executable_list, close_fds, tuple(sorted(map(int, fds_to_keep))), cwd, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, start_new_session, gid, gids, uid, umask, preexec_fn) self._child_created = True finally: # be sure the FD is closed no matter what os.close(errpipe_write) self._close_pipe_fds(p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite) # Wait for exec to fail or succeed; possibly raising an # exception (limited in size) errpipe_data = bytearray() while True: part = os.read(errpipe_read, 50000) errpipe_data += part if not part or len(errpipe_data) > 50000: break finally: # be sure the FD is closed no matter what os.close(errpipe_read) if errpipe_data: try: pid, sts = os.waitpid(self.pid, 0) if pid == self.pid: self._handle_exitstatus(sts) else: self.returncode = sys.maxsize except ChildProcessError: pass try: exception_name, hex_errno, err_msg = ( errpipe_data.split(b':', 2)) # The encoding here should match the encoding # written in by the subprocess implementations # like _posixsubprocess err_msg = err_msg.decode() except ValueError: exception_name = b'SubprocessError' hex_errno = b'0' err_msg = 'Bad exception data from child: {!r}'.format( bytes(errpipe_data)) child_exception_type = getattr( builtins, exception_name.decode('ascii'), SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) child_exec_never_called = (err_msg == "noexec") if child_exec_never_called: err_msg = "" # The error must be from chdir(cwd). err_filename = cwd else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) > raise child_exception_type(errno_num, err_msg, err_filename) E FileNotFoundError: [Errno 2] No such file or directory: 'valgrind' /usr/lib64/python3.9/subprocess.py:1821: FileNotFoundError | |||
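valgrind is simply not installed on this machine, and the test only empties valcmd when the instance was built with ASAN. A minimal sketch of skipping instead of crashing, using only stdlib calls plus the has_asan() check the test already makes:

    import shutil
    import pytest

    # Skip the memcheck loop when neither valgrind nor an ASAN build is available.
    if not topology_st.standalone.has_asan() and shutil.which('valgrind') is None:
        pytest.skip('valgrind not installed and instance not built with ASAN')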
Failed | tickets/ticket49192_test.py::test_ticket49192 | 10.79 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff999384b20> def test_ticket49192(topo): """Trigger deadlock when removing suffix """ # # Create a second suffix/backend # log.info('Creating second backend...') > topo.standalone.backends.create(None, properties={ BACKEND_NAME: "Second_Backend", 'suffix': "o=hang.com", }) /export/tests/tickets/ticket49192_test.py:35: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1215: in create return co.create(rdn, properties, self._basedn) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.backend.Backend object at 0x7ff999af0760>, dn = None properties = {'name': 'Second_Backend', 'suffix': 'o=hang.com'} basedn = 'cn=ldbm database,cn=plugins,cn=config', create_mapping_tree = True def create(self, dn=None, properties=None, basedn=DN_LDBM, create_mapping_tree=True): """Add a new backend entry, create mapping tree, and, if requested, sample entries :param dn: DN of the new entry :type dn: str :param properties: Attributes and parameters for the new entry :type properties: dict :param basedn: Base DN of the new entry :type basedn: str :param create_mapping_tree: If a related mapping tree node should be created :type create_mapping_tree: bool :returns: DSLdapObject of the created entry """ sample_entries = False parent_suffix = False # normalize suffix (remove spaces between comps) if dn is not None: dn_comps = ldap.dn.explode_dn(dn.lower()) dn = ",".join(dn_comps) if properties is not None: > suffix_dn = properties['nsslapd-suffix'].lower() E KeyError: 'nsslapd-suffix' /usr/local/lib/python3.9/site-packages/lib389/backend.py:613: KeyError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49192_test:ticket49192_test.py:34 Creating second backend... | |||
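The KeyError pins the bug precisely: backends.create() reads properties['nsslapd-suffix'], while the test supplied a 'suffix' key. A minimal sketch of the corrected call ('cn' as the naming attribute follows the usual lib389 convention):

    # Pass the attribute names lib389's Backend.create() actually looks up.
    topo.standalone.backends.create(properties={
        'cn': 'Second_Backend',
        'nsslapd-suffix': 'o=hang.com',
    })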
Failed | tickets/ticket49287_test.py::test_ticket49287 | 38.18 | |
self = <lib389.mappingTree.MappingTreeLegacy object at 0x7ff9992a0250> suffix = 'dc=test,dc=com', bename = 'test', parent = None def create(self, suffix=None, bename=None, parent=None): ''' Create a mapping tree entry (under "cn=mapping tree,cn=config"), for the 'suffix' and that is stored in 'bename' backend. 'bename' backend must exist before creating the mapping tree entry. If a 'parent' is provided that means that we are creating a sub-suffix mapping tree. @param suffix - suffix mapped by this mapping tree entry. It will be the common name ('cn') of the entry @param benamebase - backend common name (e.g. 'userRoot') @param parent - if provided is a parent suffix of 'suffix' @return DN of the mapping tree entry @raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping tree does not exist ValueError - if missing a parameter, ''' # Check suffix is provided if not suffix: raise ValueError("suffix is mandatory") else: nsuffix = normalizeDN(suffix) # Check backend name is provided if not bename: raise ValueError("backend name is mandatory") # Check that if the parent suffix is provided then # it exists a mapping tree for it if parent: nparent = normalizeDN(parent) filt = suffixfilt(parent) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) pass except NoSuchEntryError: raise ValueError("parent suffix has no mapping tree") else: nparent = "" # Check if suffix exists, return filt = suffixfilt(suffix) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) return entry except ldap.NO_SUCH_OBJECT: entry = None # # Now start the real work # # fix me when we can actually used escaped DNs dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE)) entry = Entry(dn) entry.update({ 'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE], 'nsslapd-state': 'backend', # the value in the dn has to be DN escaped # internal code will add the quoted value - unquoted value is # useful for searching. MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix, MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename }) # possibly add the parent if parent: entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent) try: self.log.debug("Creating entry: %s", entry.dn) self.log.info("Entry %r", entry) > self.conn.add_s(entry) /usr/local/lib/python3.9/site-packages/lib389/mappingTree.py:154: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree ,) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x5628dddabd70, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 169,...neno=187, function='_multicall', code_context=[' res = hook_impl.function(*args)\n'], index=0), ...] 
frame = FrameInfo(frame=<frame at 0x5628dd2ae940, file '/usr/local/lib/python3.9/site-packages/lib389/mappingTree.py', line 15.../lib389/mappingTree.py', lineno=154, function='create', code_context=[' self.conn.add_s(entry)\n'], index=0) ent = dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): > return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99977c550> dn = 'cn="dc=test,dc=com",cn=mapping tree,cn=config' modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])] def add_s(self,dn,modlist): > return self.add_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:439: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('cn="dc=test,dc=com",cn=mapping tree,cn=config', [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])], None, None) kwargs = {}, ent = 'cn="dc=test,dc=com",cn=mapping tree,cn=config' def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:171: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99977c550> dn = 'cn="dc=test,dc=com",cn=mapping tree,cn=config' modlist = [('objectclass', [b'top', b'extensibleObject', b'nsMappingTree']), ('nsslapd-state', [b'backend']), ('cn', [b'dc=test,dc=com']), ('nsslapd-backend', [b'test'])] serverctrls = None, clientctrls = None def add_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.add_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (4,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99977c550>, msgid = 4, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (4, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99977c550>, msgid = 4, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff99f318690>, 4, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99977c550> func = <built-in method result4 of LDAP object at 0x7ff99f318690> args = (4, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.UNWILLING_TO_PERFORM'> exc_value = UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []}) exc_traceback = <traceback object at 0x7ff999658dc0> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99977c550> func = <built-in method result4 of LDAP object at 0x7ff99f318690> args = (4, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM During handling of the above exception, another exception occurred: topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99977c6d0> def test_ticket49287(topology_m2): """ test case for memberof and conflict entries """ # return M1 = topology_m2.ms["supplier1"] M2 = topology_m2.ms["supplier2"] config_memberof(M1) config_memberof(M2) _enable_spec_logging(M1) _enable_spec_logging(M2) _disable_nunc_stans(M1) _disable_nunc_stans(M2) M1.restart(timeout=10) M2.restart(timeout=10) testbase = 'dc=test,dc=com' bename = 'test' > create_backend(M1, M2, testbase, bename) /export/tests/tickets/ticket49287_test.py:282: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket49287_test.py:204: in create_backend s1.mappingtree.create(beSuffix, beName) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.mappingTree.MappingTreeLegacy object at 0x7ff9992a0250> suffix = 'dc=test,dc=com', bename = 'test', parent = None def create(self, suffix=None, bename=None, parent=None): ''' Create a mapping tree entry (under "cn=mapping tree,cn=config"), for the 'suffix' and that is stored in 'bename' backend. 'bename' backend must exist before creating the mapping tree entry. If a 'parent' is provided that means that we are creating a sub-suffix mapping tree. @param suffix - suffix mapped by this mapping tree entry. It will be the common name ('cn') of the entry @param benamebase - backend common name (e.g. 
'userRoot') @param parent - if provided is a parent suffix of 'suffix' @return DN of the mapping tree entry @raise ldap.NO_SUCH_OBJECT - if the backend entry or parent mapping tree does not exist ValueError - if missing a parameter, ''' # Check suffix is provided if not suffix: raise ValueError("suffix is mandatory") else: nsuffix = normalizeDN(suffix) # Check backend name is provided if not bename: raise ValueError("backend name is mandatory") # Check that if the parent suffix is provided then # it exists a mapping tree for it if parent: nparent = normalizeDN(parent) filt = suffixfilt(parent) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) pass except NoSuchEntryError: raise ValueError("parent suffix has no mapping tree") else: nparent = "" # Check if suffix exists, return filt = suffixfilt(suffix) try: entry = self.conn.getEntry(DN_MAPPING_TREE, ldap.SCOPE_SUBTREE, filt) return entry except ldap.NO_SUCH_OBJECT: entry = None # # Now start the real work # # fix me when we can actually used escaped DNs dn = ','.join(('cn="%s"' % nsuffix, DN_MAPPING_TREE)) entry = Entry(dn) entry.update({ 'objectclass': ['top', 'extensibleObject', MT_OBJECTCLASS_VALUE], 'nsslapd-state': 'backend', # the value in the dn has to be DN escaped # internal code will add the quoted value - unquoted value is # useful for searching. MT_PROPNAME_TO_ATTRNAME[MT_SUFFIX]: nsuffix, MT_PROPNAME_TO_ATTRNAME[MT_BACKEND]: bename }) # possibly add the parent if parent: entry.setValues(MT_PROPNAME_TO_ATTRNAME[MT_PARENT_SUFFIX], nparent) try: self.log.debug("Creating entry: %s", entry.dn) self.log.info("Entry %r", entry) self.conn.add_s(entry) except ldap.LDAPError as e: > raise ldap.LDAPError("Error adding suffix entry " + dn, e) E ldap.LDAPError: ('Error adding suffix entry cn="dc=test,dc=com",cn=mapping tree,cn=config', UNWILLING_TO_PERFORM({'msgtype': 105, 'msgid': 4, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []})) /usr/local/lib/python3.9/site-packages/lib389/mappingTree.py:156: LDAPError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6cdf02b3-0fd4-476d-baf3-558bf727b87c / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 13f03046-956f-40e4-afdd-b3527d16cbbc / got description=6cdf02b3-0fd4-476d-baf3-558bf727b87c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49287_test:ticket49287_test.py:77 update cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO tests.tickets.ticket49287_test:ticket49287_test.py:77 update cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO lib389:mappingTree.py:153 Entry dn: cn="dc=test,dc=com",cn=mapping tree,cn=config cn: dc=test,dc=com nsslapd-backend: test nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree | |||
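The UNWILLING_TO_PERFORM here is raised on the raw add_s() path that lib389's own wrapper (quoted in the traceback) already flags as deprecated. For comparison, a sketch of the same setup through the Backends API used by other tests in this report; per the create() docstring shown earlier, that call creates the mapping tree node itself (create_mapping_tree defaults to True). Whether this build would accept it is untested here:

    # Sketch only: create the 'test' backend and its mapping tree through the
    # non-legacy API instead of MappingTreeLegacy.create() plus raw add_s().
    from lib389.backend import Backends

    Backends(M1).create(properties={
        'name': 'test',                      # backend name used by the test
        'nsslapd-suffix': 'dc=test,dc=com',  # suffix used by the test
    })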
Failed | tickets/ticket49303_test.py::test_ticket49303 | 26.02 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff9992b5910> def test_ticket49303(topo): """ Test the nsTLSAllowClientRenegotiation setting. """ sslport = SECUREPORT_STANDALONE1 log.info("Ticket 49303 - Allow disabling of SSL renegotiation") # No value set, defaults to reneg allowed enable_ssl(topo.standalone, sslport) > assert try_reneg(HOST_STANDALONE1, sslport) is True E AssertionError: assert False is True E + where False = try_reneg('LOCALHOST', 63601) /export/tests/tickets/ticket49303_test.py:88: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49303_test:ticket49303_test.py:84 Ticket 49303 - Allow disabling of SSL renegotiation | |||
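try_reneg() returned False against the secure port, so client-initiated renegotiation was refused even though no nsTLSAllowClientRenegotiation value had been set (the default is to allow it). The helper's body is not shown in this report; a rough, hypothetical stand-in for this kind of probe drives openssl s_client, which renegotiates when it reads a line containing only "R" and prints RENEGOTIATING when it does:

    # Hypothetical probe, not the test's try_reneg(): request a TLS
    # renegotiation via s_client and look for its RENEGOTIATING marker.
    import subprocess

    def try_reneg(host, port):
        res = subprocess.run(
            ['openssl', 's_client', '-connect', '%s:%d' % (host, port)],
            input=b'R\n', capture_output=True, timeout=30)
        return b'RENEGOTIATING' in (res.stdout + res.stderr)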
Failed | tickets/ticket49412_test.py::test_ticket49412 | 25.51 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff999ae98e0> def test_ticket49412(topo): """Specify a test case purpose or name here :id: 4c7681ff-0511-4256-9589-bdcad84c13e6 :setup: Fill in set up configuration here :steps: 1. Fill in test case steps here 2. And indent them like this (RST format requirement) :expectedresults: 1. Fill in the result that is expected 2. For each test step """ M1 = topo.ms["supplier1"] # wrong call with invalid value (should be str(60) # that create replace with NULL value # it should fail with UNWILLING_TO_PERFORM try: > M1.modify_s(CHANGELOG, [(ldap.MOD_REPLACE, MAXAGE_ATTR, 60), (ldap.MOD_REPLACE, TRIMINTERVAL, 10)]) /export/tests/tickets/ticket49412_test.py:44: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: in modify_s return self.modify_ext_s(dn,modlist,None,None) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9998651c0> func = <built-in method result4 of LDAP object at 0x7ff9991adb70> args = (39, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.NO_SUCH_OBJECT: {'msgtype': 103, 'msgid': 39, 'result': 32, 'desc': 'No such object', 'ctrls': []} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: NO_SUCH_OBJECT -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. 
INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect e5dd14b1-3ce8-46aa-9f20-6752edeb3c14 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists | |||
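The failure mode does not match the test's intent. The modlist deliberately carries an integer 60 (python-ldap 3.x wants bytes values, e.g. b'60'; the test's own comment says it "should be str(60)") to provoke UNWILLING_TO_PERFORM, but the server answered NO_SUCH_OBJECT (result 32): the target entry itself is missing. Assuming CHANGELOG names the pre-2.x cn=changelog5,cn=config entry (an assumption; the constant's value is not shown in this report), a sketch separating the two outcomes:

    # Sketch: distinguish "bad value rejected" from "entry does not exist".
    import ldap

    try:
        M1.modify_s(CHANGELOG, [(ldap.MOD_REPLACE, MAXAGE_ATTR, 60),   # int on purpose
                                (ldap.MOD_REPLACE, TRIMINTERVAL, 10)])
    except ldap.UNWILLING_TO_PERFORM:
        pass    # what the test expects: the malformed value is refused
    except ldap.NO_SUCH_OBJECT:
        raise   # what this run got: the changelog entry no longer exists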
Failed | tickets/ticket49463_test.py::test_ticket_49463 | 255.36 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99961a610> def test_ticket_49463(topo): """Specify a test case purpose or name here :id: 2a68e8be-387d-4ac7-9452-1439e8483c13 :setup: Fill in set up configuration here :steps: 1. Enable fractional replication 2. Enable replication logging 3. Check that replication is working fine 4. Generate skipped updates to create keep alive entries 5. Remove M3 from the topology 6. issue cleanAllRuv FORCE that will run on M1 then propagated M2 and M4 7. Check that Number DEL keep alive '3' is <= 1 8. Check M1 is the originator of cleanAllRuv and M2/M4 the propagated ones 9. Check replication M1,M2 and M4 can recover 10. Remove M4 from the topology 11. Issue cleanAllRuv not force while M2 is stopped (that hangs the cleanAllRuv) 12. Check that nsds5ReplicaCleanRUV is correctly encoded on M1 (last value: 1) 13. Check that nsds5ReplicaCleanRUV encoding survives M1 restart 14. Check that nsds5ReplicaCleanRUV encoding is valid on M2 (last value: 0) 15. Check that (for M4 cleanAllRUV) M1 is Originator and M2 propagation :expectedresults: 1. No report of failure when the RUV is updated """ # Step 1 - Configure fractional (skip telephonenumber) replication M1 = topo.ms["supplier1"] M2 = topo.ms["supplier2"] M3 = topo.ms["supplier3"] M4 = topo.ms["supplier4"] repl = ReplicationManager(DEFAULT_SUFFIX) fractional_server_to_replica(M1, M2) fractional_server_to_replica(M1, M3) fractional_server_to_replica(M1, M4) fractional_server_to_replica(M2, M1) fractional_server_to_replica(M2, M3) fractional_server_to_replica(M2, M4) fractional_server_to_replica(M3, M1) fractional_server_to_replica(M3, M2) fractional_server_to_replica(M3, M4) fractional_server_to_replica(M4, M1) fractional_server_to_replica(M4, M2) fractional_server_to_replica(M4, M3) # Step 2 - enable internal op logging and replication debug for i in (M1, M2, M3, M4): i.config.loglevel(vals=[256 + 4], service='access') i.config.loglevel(vals=[LOG_REPLICA, LOG_DEFAULT], service='error') # Step 3 - Check that replication is working fine add_user(M1, 11, desc="add to M1") add_user(M2, 21, desc="add to M2") add_user(M3, 31, desc="add to M3") add_user(M4, 41, desc="add to M4") for i in (M1, M2, M3, M4): for j in (M1, M2, M3, M4): if i == j: continue repl.wait_for_replication(i, j) # Step 4 - Generate skipped updates to create keep alive entries for i in (M1, M2, M3, M4): cn = '%s_%d' % (USER_CN, 11) dn = 'uid=%s,ou=People,%s' % (cn, SUFFIX) users = UserAccount(i, dn) for j in range(110): users.set('telephoneNumber', str(j)) # Step 5 - Remove M3 from the topology M3.stop() M1.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port) M2.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port) M4.agreement.delete(suffix=SUFFIX, consumer_host=M3.host, consumer_port=M3.port) # Step 6 - Then issue cleanAllRuv FORCE that will run on M1, M2 and M4 M1.tasks.cleanAllRUV(suffix=SUFFIX, replicaid='3', force=True, args={TASK_WAIT: True}) # Step 7 - Count the number of received DEL of the keep alive 3 for i in (M1, M2, M4): i.restart() regex = re.compile(".*DEL dn=.cn=repl keep alive 3.*") for i in (M1, M2, M4): count = count_pattern_accesslog(M1, regex) log.debug("count on %s = %d" % (i, count)) # check that DEL is replicated once (If DEL is kept in the fix) # check that DEL is is not replicated (If DEL is finally no long done in the fix) assert ((count == 1) or (count == 0)) # Step 8 - Check that M1 is Originator of cleanAllRuv and M2, M4 propagation regex = re.compile(".*Original 
task deletes Keep alive entry .3.*") assert pattern_errorlog(M1, regex) regex = re.compile(".*Propagated task does not delete Keep alive entry .3.*") assert pattern_errorlog(M2, regex) assert pattern_errorlog(M4, regex) # Step 9 - Check replication M1,M2 and M4 can recover add_user(M1, 12, desc="add to M1") add_user(M2, 22, desc="add to M2") for i in (M1, M2, M4): for j in (M1, M2, M4): if i == j: continue repl.wait_for_replication(i, j) # Step 10 - Remove M4 from the topology M4.stop() M1.agreement.delete(suffix=SUFFIX, consumer_host=M4.host, consumer_port=M4.port) M2.agreement.delete(suffix=SUFFIX, consumer_host=M4.host, consumer_port=M4.port) # Step 11 - Issue cleanAllRuv not force while M2 is stopped (that hangs the cleanAllRuv) M2.stop() M1.tasks.cleanAllRUV(suffix=SUFFIX, replicaid='4', force=False, args={TASK_WAIT: False}) # Step 12 # CleanAllRuv is hanging waiting for M2 to restart # Check that nsds5ReplicaCleanRUV is correctly encoded on M1 replicas = Replicas(M1) replica = replicas.list()[0] time.sleep(0.5) replica.present('nsds5ReplicaCleanRUV') log.info("M1: nsds5ReplicaCleanRUV=%s" % replica.get_attr_val_utf8('nsds5replicacleanruv')) regex = re.compile("^4:.*:no:1$") > assert regex.match(replica.get_attr_val_utf8('nsds5replicacleanruv')) E AssertionError: assert None E + where None = <built-in method match of re.Pattern object at 0x7ff99963c8f0>('4:no:1:dc=example,dc=com') E + where <built-in method match of re.Pattern object at 0x7ff99963c8f0> = re.compile('^4:.*:no:1$').match E + and '4:no:1:dc=example,dc=com' = <bound method DSLdapObject.get_attr_val_utf8 of <lib389.replica.Replica object at 0x7ff999300a00>>('nsds5replicacleanruv') E + where <bound method DSLdapObject.get_attr_val_utf8 of <lib389.replica.Replica object at 0x7ff999300a00>> = <lib389.replica.Replica object at 0x7ff999300a00>.get_attr_val_utf8 /export/tests/tickets/ticket49463_test.py:188: AssertionError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier4 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'supplier4', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 56b0154b-b51e-4bf8-ae2c-a52a07d8648f / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 69673972-05a6-467e-8cd7-7ee3bfb7a0c0 / got description=56b0154b-b51e-4bf8-ae2c-a52a07d8648f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 76555de8-c4a7-4018-b54e-9ee9c9073252 / got description=69673972-05a6-467e-8cd7-7ee3bfb7a0c0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 18dfe4f6-9ebd-499e-b008-18c439677278 / got description=76555de8-c4a7-4018-b54e-9ee9c9073252) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:156 Joining supplier supplier4 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 92b68d32-01dd-43a0-ab62-4d48b96d6261 / got description=18dfe4f6-9ebd-499e-b008-18c439677278) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5b9190e-e9fd-4d15-a16d-4490c957a225 / got description=92b68d32-01dd-43a0-ab62-4d48b96d6261) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5b9190e-e9fd-4d15-a16d-4490c957a225 / got description=92b68d32-01dd-43a0-ab62-4d48b96d6261) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5b9190e-e9fd-4d15-a16d-4490c957a225 / got description=92b68d32-01dd-43a0-ab62-4d48b96d6261) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5b9190e-e9fd-4d15-a16d-4490c957a225 / got description=92b68d32-01dd-43a0-ab62-4d48b96d6261) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5b9190e-e9fd-4d15-a16d-4490c957a225 / got description=92b68d32-01dd-43a0-ab62-4d48b96d6261) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b5b9190e-e9fd-4d15-a16d-4490c957a225 / got description=92b68d32-01dd-43a0-ab62-4d48b96d6261) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier4 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier3 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d7c44a90-77c7-4e28-92d0-70a2f569f8c1 / got description=b5b9190e-e9fd-4d15-a16d-4490c957a225) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 44e33461-f1c7-4a59-ab65-dad4e0508d58 / got description=d7c44a90-77c7-4e28-92d0-70a2f569f8c1) INFO lib389.replica:replica.py:2496 
SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3dbd2979-6353-411a-be2c-7acd52a8daf7 / got description=44e33461-f1c7-4a59-ab65-dad4e0508d58) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3dbd2979-6353-411a-be2c-7acd52a8daf7 / got description=44e33461-f1c7-4a59-ab65-dad4e0508d58) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3dbd2979-6353-411a-be2c-7acd52a8daf7 / got description=44e33461-f1c7-4a59-ab65-dad4e0508d58) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 3dbd2979-6353-411a-be2c-7acd52a8daf7 / got description=44e33461-f1c7-4a59-ab65-dad4e0508d58) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4bf8a827-e34c-4449-89a6-4d6efa08879b / got description=3dbd2979-6353-411a-be2c-7acd52a8daf7) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4bf8a827-e34c-4449-89a6-4d6efa08879b / got description=3dbd2979-6353-411a-be2c-7acd52a8daf7) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4bf8a827-e34c-4449-89a6-4d6efa08879b / got description=3dbd2979-6353-411a-be2c-7acd52a8daf7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 16cc1287-6c66-450c-9b19-03ee6f8f687a / got description=4bf8a827-e34c-4449-89a6-4d6efa08879b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 068eae70-0f09-4092-8059-cf6740ad1d01 / got description=16cc1287-6c66-450c-9b19-03ee6f8f687a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2482e395-36e3-4f2f-a349-e4a5ae748494 / got description=068eae70-0f09-4092-8059-cf6740ad1d01)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c15521cc-1e27-4e55-bbb9-64c5664a92fe / got description=2482e395-36e3-4f2f-a349-e4a5ae748494)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect de2d52fe-a6a9-48c7-a8f6-fa2a9ac98afc / got description=c15521cc-1e27-4e55-bbb9-64c5664a92fe)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ca29b574-59e3-477c-b824-d98240e8c16e / got description=de2d52fe-a6a9-48c7-a8f6-fa2a9ac98afc)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 00f9f05f-f207-464a-821a-730f7ed87d93 / got description=ca29b574-59e3-477c-b824-d98240e8c16e)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect af9fc877-b0a6-42fe-b542-3a39e1f26a3c / got description=00f9f05f-f207-464a-821a-730f7ed87d93)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working
INFO lib389:agreement.py:1094 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed
INFO lib389:agreement.py:1094 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed
INFO lib389:agreement.py:1094 Agreement (cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed
INFO lib389:tasks.py:1475 cleanAllRUV task (task-06052021_020242) completed successfully
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 64dad632-de55-4755-bdc1-bb1c2bc960e5 / got description=af9fc877-b0a6-42fe-b542-3a39e1f26a3c)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 785e3d88-b39f-45fd-99cd-fe036b9141ce / got description=64dad632-de55-4755-bdc1-bb1c2bc960e5)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect df04606f-a071-4f91-bad5-543515c363a6 / got description=785e3d88-b39f-45fd-99cd-fe036b9141ce)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect deb3c4ef-7db1-4236-a909-18cc004b07df / got description=df04606f-a071-4f91-bad5-543515c363a6)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect deb3c4ef-7db1-4236-a909-18cc004b07df / got description=df04606f-a071-4f91-bad5-543515c363a6)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect deb3c4ef-7db1-4236-a909-18cc004b07df / got description=df04606f-a071-4f91-bad5-543515c363a6)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect deb3c4ef-7db1-4236-a909-18cc004b07df / got description=df04606f-a071-4f91-bad5-543515c363a6)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f9bd06fd-b00c-4532-8294-a54e98cc88bb / got description=deb3c4ef-7db1-4236-a909-18cc004b07df)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7cc5ac76-7b8a-4504-b888-39532e941a76 / got description=f9bd06fd-b00c-4532-8294-a54e98cc88bb)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389:agreement.py:1094 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed
INFO lib389:agreement.py:1094 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed
INFO lib389:tasks.py:1475 cleanAllRUV task (task-06052021_020328) completed successfully
INFO lib389.utils:ticket49463_test.py:186 M1: nsds5ReplicaCleanRUV=4:no:1:dc=example,dc=com | |||
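The Retry/SUCCESS pairs above are produced by lib389's replication acceptance loop: it writes a fresh UUID into the description attribute of the replicated suffix on the sending replica, then polls the receiving replica until the same value is read back. A minimal sketch of that polling pattern, assuming lib389-style replace()/get_attr_val_utf8() accessors on the two suffix entries (an illustration, not ReplicationManager's actual code):

    import time
    import uuid

    def wait_for_replication(supplier_entry, consumer_entry, timeout=60, interval=2):
        # Stamp the supplier with a unique marker; replication should carry it over.
        marker = str(uuid.uuid4())
        supplier_entry.replace('description', marker)
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            seen = consumer_entry.get_attr_val_utf8('description')
            if seen == marker:
                return True   # logged above as "SUCCESS: Replication ... is working"
            # logged above as "Retry: ... is NOT working (expect <marker> / got description=<seen>)"
            time.sleep(interval)
        return False

A run that needs several attempts before converging shows up exactly like the four consecutive Retry records for 39002 to 39004 above.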
Failed | tickets/ticket50232_test.py::test_ticket50232_normal | 8.80 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff998fdc5e0> def test_ticket50232_normal(topology_st): """ The fix for ticket 50232 The test sequence is: - create suffix - add suffix entry and some child entries - "normally" done after populating suffix: enable replication - get RUV and database generation - export -r - import - get RUV and database generation - assert database generation has not changed """ log.info('Testing Ticket 50232 - export creates not imprtable ldif file, normal creation order') topology_st.standalone.backend.create(NORMAL_SUFFIX, {BACKEND_NAME: NORMAL_BACKEND_NAME}) topology_st.standalone.mappingtree.create(NORMAL_SUFFIX, bename=NORMAL_BACKEND_NAME, parent=None) _populate_suffix(topology_st.standalone, NORMAL_BACKEND_NAME) repl = ReplicationManager(DEFAULT_SUFFIX) > repl._ensure_changelog(topology_st.standalone) /export/tests/tickets/ticket50232_test.py:113: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/replica.py:1928: in _ensure_changelog cl.create(properties={ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:972: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:947: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff998fdc8e0> func = <built-in method result4 of LDAP object at 0x7ff998efbe70> args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 13, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog 
configuration is part of the backend configuration'} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=o=normal INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=normal,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=normal,cn=ldbm database,cn=plugins,cn=config cn: normal nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/normal nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=normal objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=normal",cn=mapping tree,cn=config cn: o=normal nsslapd-backend: normal nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dnormal,cn=mapping tree,cn=config cn: o=normal nsslapd-backend: normal nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
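Both ticket50232 failures share one root cause: ReplicationManager._ensure_changelog() still adds a cn=changelog5,cn=config entry, which 389-ds-base 2.x rejects because the changelog has moved into the backend configuration (exactly the 'info' text in the exception above). A tolerant wrapper would look roughly like this; a sketch only, where repl is assumed to be a lib389 ReplicationManager, not the upstream fix itself:

    import ldap

    def ensure_changelog_compat(repl, instance):
        try:
            repl._ensure_changelog(instance)
        except ldap.UNWILLING_TO_PERFORM as e:
            info = e.args[0].get('info', '') if e.args else ''
            # Swallow only the specific refusal seen above: on 2.x the
            # per-backend changelog already exists, so there is nothing to add.
            if 'backend configuration' not in info:
                raise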
Failed | tickets/ticket50232_test.py::test_ticket50232_reverse | 1.15 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff998fdc5e0> def test_ticket50232_reverse(topology_st): """ The fix for ticket 50232 The test sequence is: - create suffix - enable replication before suffix enztry is added - add suffix entry and some child entries - get RUV and database generation - export -r - import - get RUV and database generation - assert database generation has not changed """ log.info('Testing Ticket 50232 - export creates not imprtable ldif file, normal creation order') # # Setup Replication # log.info('Setting up replication...') repl = ReplicationManager(DEFAULT_SUFFIX) # repl.create_first_supplier(topology_st.standalone) # # enable dynamic plugins, memberof and retro cl plugin # topology_st.standalone.backend.create(REVERSE_SUFFIX, {BACKEND_NAME: REVERSE_BACKEND_NAME}) topology_st.standalone.mappingtree.create(REVERSE_SUFFIX, bename=REVERSE_BACKEND_NAME, parent=None) > _enable_replica(topology_st.standalone, REVERSE_SUFFIX) /export/tests/tickets/ticket50232_test.py:155: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /export/tests/tickets/ticket50232_test.py:35: in _enable_replica repl._ensure_changelog(instance) /usr/local/lib/python3.9/site-packages/lib389/replica.py:1928: in _ensure_changelog cl.create(properties={ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:972: in create return self._create(rdn, properties, basedn, ensure=False) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:947: in _create self._instance.add_ext_s(e, serverctrls=self._server_controls, clientctrls=self._client_controls, escapehatch='i am sure') /usr/local/lib/python3.9/site-packages/lib389/__init__.py:169: in inner return f(ent.dn, ent.toTupleList(), *args[2:]) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:425: in add_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff998fdc8e0> func = <built-in method result4 of LDAP object at 0x7ff998efbe70> args = (22, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) 
diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.UNWILLING_TO_PERFORM: {'msgtype': 105, 'msgid': 22, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': [], 'info': 'Changelog configuration is part of the backend configuration'} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: UNWILLING_TO_PERFORM -------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=o=reverse INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=reverse,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=reverse,cn=ldbm database,cn=plugins,cn=config cn: reverse nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/reverse nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=reverse objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=reverse",cn=mapping tree,cn=config cn: o=reverse nsslapd-backend: reverse nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dreverse,cn=mapping tree,cn=config cn: o=reverse nsslapd-backend: reverse nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
Failed | tickets/ticket548_test.py::test_ticket548_test_with_no_policy | 8.21 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff9990e4820> def test_ticket548_test_with_no_policy(topology_st): """ Check shadowAccount under no password policy """ log.info("Case 1. No password policy") log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) log.info('Add an entry' + USER1_DN) try: topology_st.standalone.add_s( Entry((USER1_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(), 'sn': '1', 'cn': 'user 1', 'uid': 'user1', 'givenname': 'user', 'mail': 'user1@' + DEFAULT_SUFFIX, 'userpassword': USER_PW}))) except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to add user' + USER1_DN + ': error ' + e.message['desc']) assert False edate = int(time.time() / (60 * 60 * 24)) log.info('Search entry %s' % USER1_DN) log.info("Bind as %s" % USER1_DN) topology_st.standalone.simple_bind_s(USER1_DN, USER_PW) > entry = topology_st.standalone.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)", ['shadowLastChange']) /export/tests/tickets/ticket548_test.py:211: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0> args = ('uid=user1,dc=example,dc=com', 0, '(objectclass=*)', ['shadowLastChange']) kwargs = {}, res = 6, restype = 101, obj = [] def getEntry(self, *args, **kwargs): """Wrapper around SimpleLDAPObject.search. It is common to just get one entry. @param - entry dn @param - search scope, in ldap.SCOPE_BASE (default), ldap.SCOPE_SUB, ldap.SCOPE_ONE @param filterstr - filterstr, default '(objectClass=*)' from SimpleLDAPObject @param attrlist - list of attributes to retrieve. eg ['cn', 'uid'] @oaram attrsonly - default None from SimpleLDAPObject eg. getEntry(dn, scope, filter, attributes) XXX This cannot return None """ self.log.debug("Retrieving entry with %r", [args]) if len(args) == 1 and 'scope' not in kwargs: args += (ldap.SCOPE_BASE, ) res = self.search(*args, **kwargs) restype, obj = self.result(res) # TODO: why not test restype? if not obj: > raise NoSuchEntryError("no such entry for %r", [args]) E lib389.exceptions.NoSuchEntryError: ('no such entry for %r', [('uid=user1,dc=example,dc=com', 0, '(objectclass=*)', ['shadowLastChange'])]) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:1697: NoSuchEntryError -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
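As the getEntry docstring above notes, the wrapper cannot return None; an empty result surfaces as NoSuchEntryError instead. A caller that wants to treat a missing entry as a soft condition, for instance while waiting for shadowAccount attributes to appear, has to catch the exception. A small sketch against the lib389 API shown in this traceback:

    import ldap
    from lib389.exceptions import NoSuchEntryError

    def get_entry_or_none(instance, dn, attrlist=None):
        # DirSrv.getEntry() raises rather than returning an empty result.
        try:
            return instance.getEntry(dn, ldap.SCOPE_BASE, '(objectclass=*)', attrlist)
        except NoSuchEntryError:
            return None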
Failed | tickets/ticket548_test.py::test_ticket548_test_global_policy | 0.21 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff9990e4820> def test_ticket548_test_global_policy(topology_st): """ Check shadowAccount with global password policy """ log.info("Case 2. Check shadowAccount with global password policy") log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) set_global_pwpolicy(topology_st) log.info('Add an entry' + USER2_DN) try: topology_st.standalone.add_s( Entry((USER2_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(), 'sn': '2', 'cn': 'user 2', 'uid': 'user2', 'givenname': 'user', 'mail': 'user2@' + DEFAULT_SUFFIX, 'userpassword': USER_PW}))) except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to add user' + USER2_DN + ': error ' + e.message['desc']) assert False edate = int(time.time() / (60 * 60 * 24)) log.info("Bind as %s" % USER1_DN) topology_st.standalone.simple_bind_s(USER1_DN, USER_PW) log.info('Search entry %s' % USER1_DN) > entry = topology_st.standalone.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)") /export/tests/tickets/ticket548_test.py:249: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0> args = ('uid=user1,dc=example,dc=com', 0, '(objectclass=*)'), kwargs = {} res = 15, restype = 101, obj = [] def getEntry(self, *args, **kwargs): """Wrapper around SimpleLDAPObject.search. It is common to just get one entry. @param - entry dn @param - search scope, in ldap.SCOPE_BASE (default), ldap.SCOPE_SUB, ldap.SCOPE_ONE @param filterstr - filterstr, default '(objectClass=*)' from SimpleLDAPObject @param attrlist - list of attributes to retrieve. eg ['cn', 'uid'] @oaram attrsonly - default None from SimpleLDAPObject eg. getEntry(dn, scope, filter, attributes) XXX This cannot return None """ self.log.debug("Retrieving entry with %r", [args]) if len(args) == 1 and 'scope' not in kwargs: args += (ldap.SCOPE_BASE, ) res = self.search(*args, **kwargs) restype, obj = self.result(res) # TODO: why not test restype? if not obj: > raise NoSuchEntryError("no such entry for %r", [args]) E lib389.exceptions.NoSuchEntryError: ('no such entry for %r', [('uid=user1,dc=example,dc=com', 0, '(objectclass=*)')]) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:1697: NoSuchEntryError | |||
Failed | tickets/ticket548_test.py::test_ticket548_test_subtree_policy | 3.89 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff9990e4820> user = 'uid=user3,dc=example,dc=com', passwd = 'password' newpasswd = 'password0' def update_passwd(topology_st, user, passwd, newpasswd): log.info(" Bind as {%s,%s}" % (user, passwd)) topology_st.standalone.simple_bind_s(user, passwd) try: > topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())]) /export/tests/tickets/ticket548_test.py:160: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user3,dc=example,dc=com', [(2, 'userpassword', b'password0')]) kwargs = {} c_stack = [FrameInfo(frame=<frame at 0x7ff9993ff440, file '/usr/local/lib/python3.9/site-packages/lib389/__init__.py', line 173,...mbda>', code_context=[' self._inner_hookexec = lambda hook, methods, kwargs: hook.multicall(\n'], index=0), ...] frame = FrameInfo(frame=<frame at 0x5628ddc35c60, file '/export/tests/tickets/ticket548_test.py', line 164, code update_passwd...[" topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())])\n"], index=0) def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0> dn = 'uid=user3,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')] def modify_s(self,dn,modlist): > return self.modify_ext_s(dn,modlist,None,None) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:640: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = ('uid=user3,dc=example,dc=com', [(2, 'userpassword', b'password0')], None, None) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and 
kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0> dn = 'uid=user3,dc=example,dc=com' modlist = [(2, 'userpassword', b'password0')], serverctrls = None clientctrls = None def modify_ext_s(self,dn,modlist,serverctrls=None,clientctrls=None): msgid = self.modify_ext(dn,modlist,serverctrls,clientctrls) > resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (34,), kwargs = {'all': 1, 'timeout': -1} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0>, msgid = 34, all = 1 timeout = -1, resp_ctrl_classes = None def result3(self,msgid=ldap.RES_ANY,all=1,timeout=None,resp_ctrl_classes=None): > resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( msgid,all,timeout, add_ctrls=0,add_intermediates=0,add_extop=0, resp_ctrl_classes=resp_ctrl_classes ) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (34, 1, -1) kwargs = {'add_ctrls': 0, 'add_extop': 0, 'add_intermediates': 0, 'resp_ctrl_classes': None} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0>, msgid = 34, all = 1 timeout = -1, add_ctrls = 0, add_intermediates = 0, add_extop = 0 resp_ctrl_classes = None def result4(self,msgid=ldap.RES_ANY,all=1,timeout=None,add_ctrls=0,add_intermediates=0,add_extop=0,resp_ctrl_classes=None): if timeout is None: timeout = self.timeout > ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ args = (<built-in method result4 of LDAP object at 0x7ff998e6eb40>, 34, 1, -1, 0, 0, ...) kwargs = {} def inner(*args, **kwargs): if name in [ 'add_s', 'bind_s', 'delete_s', 'modify_s', 'modrdn_s', 'rename_s', 'sasl_interactive_bind_s', 'search_s', 'search_ext_s', 'simple_bind_s', 'unbind_s', 'getEntry', ] and not ('escapehatch' in kwargs and kwargs['escapehatch'] == 'i am sure'): c_stack = inspect.stack() frame = c_stack[1] warnings.warn(DeprecationWarning("Use of raw ldap function %s. This will be removed in a future release. " "Found in: %s:%s" % (name, frame.filename, frame.lineno))) # Later, we will add a sleep here to make it even more painful. # Finally, it will raise an exception. 
elif 'escapehatch' in kwargs: kwargs.pop('escapehatch') if name == 'result': objtype, data = f(*args, **kwargs) # data is either a 2-tuple or a list of 2-tuples # print data if data: if isinstance(data, tuple): return objtype, Entry(data) elif isinstance(data, list): # AD sends back these search references # if objtype == ldap.RES_SEARCH_RESULT and \ # isinstance(data[-1],tuple) and \ # not data[-1][0]: # print "Received search reference: " # pprint.pprint(data[-1][1]) # data.pop() # remove the last non-entry element return objtype, [Entry(x) for x in data] else: raise TypeError("unknown data type %s returned by result" % type(data)) else: return objtype, data elif name.startswith('add'): # the first arg is self # the second and third arg are the dn and the data to send # We need to convert the Entry into the format used by # python-ldap ent = args[0] if isinstance(ent, Entry): return f(ent.dn, ent.toTupleList(), *args[2:]) else: return f(*args, **kwargs) else: > return f(*args, **kwargs) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0> func = <built-in method result4 of LDAP object at 0x7ff998e6eb40> args = (34, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: result = func(*args,**kwargs) if __debug__ and self._trace_level>=2: if func.__name__!="unbind_ext": diagnostic_message_success = self._l.get_option(ldap.OPT_DIAGNOSTIC_MESSAGE) finally: self._ldap_object_lock.release() except LDAPError as e: exc_type,exc_value,exc_traceback = sys.exc_info() try: if 'info' not in e.args[0] and 'errno' in e.args[0]: e.args[0]['info'] = strerror(e.args[0]['errno']) except IndexError: pass if __debug__ and self._trace_level>=2: self._trace_file.write('=> LDAPError - %s: %s\n' % (e.__class__.__name__,str(e))) try: > reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ exc_type = <class 'ldap.INSUFFICIENT_ACCESS'> exc_value = INSUFFICIENT_ACCESS({'msgtype': 103, 'msgid': 34, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user3,dc=example,dc=com'.\n"}) exc_traceback = <traceback object at 0x7ff99904d640> def reraise(exc_type, exc_value, exc_traceback): """Re-raise an exception given information from sys.exc_info() Note that unlike six.reraise, this does not support replacing the traceback. All arguments must come from a single sys.exc_info() call. """ # In Python 3, all exception info is contained in one object. 
> raise exc_value /usr/lib64/python3.9/site-packages/ldap/compat.py:46: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff9990e48e0> func = <built-in method result4 of LDAP object at 0x7ff998e6eb40> args = (34, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 34, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'userPassword' attribute of entry 'uid=user3,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS During handling of the above exception, another exception occurred: topology_st = <lib389.topologies.TopologyMain object at 0x7ff9990e4820> def test_ticket548_test_subtree_policy(topology_st): """ Check shadowAccount with subtree level password policy """ log.info("Case 3. Check shadowAccount with subtree level password policy") log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) # Check the global policy values set_subtree_pwpolicy(topology_st, 2, 20, 6) log.info('Add an entry' + USER3_DN) try: topology_st.standalone.add_s( Entry((USER3_DN, {'objectclass': "top person organizationalPerson inetOrgPerson shadowAccount".split(), 'sn': '3', 'cn': 'user 3', 'uid': 'user3', 'givenname': 'user', 'mail': 'user3@' + DEFAULT_SUFFIX, 'userpassword': USER_PW}))) except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to add user' + USER3_DN + ': error ' + e.message['desc']) assert False log.info('Search entry %s' % USER3_DN) entry0 = topology_st.standalone.getEntry(USER3_DN, ldap.SCOPE_BASE, "(objectclass=*)") log.info('Expecting shadowLastChange 0 since passwordMustChange is on') check_shadow_attr_value(entry0, 'shadowLastChange', 0, USER3_DN) # passwordMinAge -- 2 day check_shadow_attr_value(entry0, 'shadowMin', 2, USER3_DN) # passwordMaxAge -- 20 days check_shadow_attr_value(entry0, 'shadowMax', 20, USER3_DN) # passwordWarning -- 6 days check_shadow_attr_value(entry0, 'shadowWarning', 6, USER3_DN) log.info("Bind as %s" % USER3_DN) topology_st.standalone.simple_bind_s(USER3_DN, USER_PW) log.info('Search entry %s' % USER3_DN) try: entry1 = topology_st.standalone.getEntry(USER3_DN, ldap.SCOPE_BASE, "(objectclass=*)") except ldap.UNWILLING_TO_PERFORM: log.info('test_ticket548: Search by' + USER3_DN + ' failed by UNWILLING_TO_PERFORM as expected') except ldap.LDAPError as e: log.fatal('test_ticket548: Failed to serch user' + USER3_DN + ' by self: error ' + e.message['desc']) assert False log.info("Bind as %s and updating the password with a new one" % USER3_DN) topology_st.standalone.simple_bind_s(USER3_DN, USER_PW) # Bind as DM again, change policy log.info("Bind as %s" % DN_DM) topology_st.standalone.simple_bind_s(DN_DM, PASSWORD) set_subtree_pwpolicy(topology_st, 4, 40, 12) newpasswd = USER_PW + '0' > update_passwd(topology_st, USER3_DN, 
USER_PW, newpasswd) /export/tests/tickets/ticket548_test.py:372: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology_st = <lib389.topologies.TopologyMain object at 0x7ff9990e4820> user = 'uid=user3,dc=example,dc=com', passwd = 'password' newpasswd = 'password0' def update_passwd(topology_st, user, passwd, newpasswd): log.info(" Bind as {%s,%s}" % (user, passwd)) topology_st.standalone.simple_bind_s(user, passwd) try: topology_st.standalone.modify_s(user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())]) except ldap.LDAPError as e: > log.fatal('test_ticket548: Failed to update the password ' + cpw + ' of user ' + user + ': error ' + e.message[ 'desc']) E NameError: name 'cpw' is not defined /export/tests/tickets/ticket548_test.py:162: NameError | |||
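The closing NameError masks the real failure, the INSUFFICIENT_ACCESS earlier in this traceback: the helper's error path logs an undefined name cpw, and e.message['desc'] is a Python 2 idiom that would itself raise under Python 3. A corrected sketch of the helper (log is assumed to be the test module's logger):

    import ldap

    def update_passwd(topology_st, user, passwd, newpasswd):
        log.info(" Bind as {%s,%s}" % (user, passwd))
        topology_st.standalone.simple_bind_s(user, passwd)
        try:
            topology_st.standalone.modify_s(
                user, [(ldap.MOD_REPLACE, 'userpassword', newpasswd.encode())])
        except ldap.LDAPError as e:
            # Report the variable that actually exists (newpasswd) and read
            # the LDAP 'desc' from e.args[0], its Python 3 location.
            desc = e.args[0].get('desc', str(e)) if e.args else str(e)
            log.fatal('test_ticket548: Failed to update the password %s of user %s: error %s'
                      % (newpasswd, user, desc))
            assert False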
XFailed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_targattrfilters_18] | 7.83 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99d10ed00> real_value = '(target = ldap:///cn=Jeff Vedder,ou=Product Development,dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of ...3123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123123";)' @pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473') @pytest.mark.parametrize("real_value", [a[1] for a in FAILED], ids=[a[0] for a in FAILED]) def test_aci_invalid_syntax_fail(topo, real_value): """ Try to set wrong ACI syntax. :id: 83c40784-fff5-49c8-9535-7064c9c19e7e :parametrized: yes :setup: Standalone Instance :steps: 1. Create ACI 2. Try to setup the ACI with Instance :expectedresults: 1. It should pass 2. It should not pass """ domain = Domain(topo.standalone, DEFAULT_SUFFIX) with pytest.raises(ldap.INVALID_SYNTAX): > domain.add("aci", real_value) E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'> suites/acl/syntax_test.py:213: Failed -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
XFailed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_targattrfilters_20] | 0.24 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99d10ed00> real_value = '(target = ldap:///cn=Jeff Vedder,ou=Product Development,dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of the ACI"; deny(write)userdns="ldap:///anyone";)' @pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473') @pytest.mark.parametrize("real_value", [a[1] for a in FAILED], ids=[a[0] for a in FAILED]) def test_aci_invalid_syntax_fail(topo, real_value): """ Try to set wrong ACI syntax. :id: 83c40784-fff5-49c8-9535-7064c9c19e7e :parametrized: yes :setup: Standalone Instance :steps: 1. Create ACI 2. Try to setup the ACI with Instance :expectedresults: 1. It should pass 2. It should not pass """ domain = Domain(topo.standalone, DEFAULT_SUFFIX) with pytest.raises(ldap.INVALID_SYNTAX): > domain.add("aci", real_value) E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'> suites/acl/syntax_test.py:213: Failed | |||
XFailed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_bind_rule_set_with_more_than_three] | 0.07 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99d10ed00> real_value = '(target = ldap:///dc=example,dc=com)(targetattr="*")(version 3.0; acl "Name of the ACI"; deny absolute (all)userdn="ldap:////////anyone";)' @pytest.mark.xfail(reason='https://bugzilla.redhat.com/show_bug.cgi?id=1691473') @pytest.mark.parametrize("real_value", [a[1] for a in FAILED], ids=[a[0] for a in FAILED]) def test_aci_invalid_syntax_fail(topo, real_value): """ Try to set wrong ACI syntax. :id: 83c40784-fff5-49c8-9535-7064c9c19e7e :parametrized: yes :setup: Standalone Instance :steps: 1. Create ACI 2. Try to setup the ACI with Instance :expectedresults: 1. It should pass 2. It should not pass """ domain = Domain(topo.standalone, DEFAULT_SUFFIX) with pytest.raises(ldap.INVALID_SYNTAX): > domain.add("aci", real_value) E Failed: DID NOT RAISE <class 'ldap.INVALID_SYNTAX'> suites/acl/syntax_test.py:213: Failed | |||
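The three XFailed entries above all come from one parametrized test whose xfail marker sits on the function, so it blankets every parameter; once the server starts rejecting a value correctly, that value is reported as an unexpected pass rather than quietly going green. Scoping the marker to the known-bad inputs with pytest.param avoids this. A self-contained sketch (the ACI strings and reason text are placeholders, not the suite's real data):

    import pytest

    CASES = [
        pytest.param('(targetattr="*")', id='balanced'),
        # Only the known-broken value carries the xfail marker.
        pytest.param('(targetattr="*"', id='unbalanced',
                     marks=pytest.mark.xfail(reason='tracked syntax-check bug')),
    ]

    @pytest.mark.parametrize('real_value', CASES)
    def test_aci_syntax(real_value):
        # Stand-in for domain.add('aci', real_value) raising INVALID_SYNTAX;
        # the unbalanced case fails here and is reported as xfailed.
        assert real_value.count('(') == real_value.count(')')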
XFailed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_3, CHILDREN)] | 0.12 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99ffd94c0> _add_user = None, user = 'uid=Grandparent,ou=Inheritance,dc=example,dc=com' entry = 'ou=CHILDREN,ou=PARENTS,ou=GRANDPARENTS,ou=ANCESTORS,ou=Inheritance,dc=example,dc=com' @pytest.mark.parametrize("user,entry", [ (CAN, ROLEDNACCESS), (CAN, USERDNACCESS), (CAN, GROUPDNACCESS), (CAN, LDAPURLACCESS), (CAN, ATTRNAMEACCESS), (LEVEL_0, OU_2), (LEVEL_1, ANCESTORS), (LEVEL_2, GRANDPARENTS), (LEVEL_4, OU_2), (LEVEL_4, ANCESTORS), (LEVEL_4, GRANDPARENTS), (LEVEL_4, PARENTS), (LEVEL_4, CHILDREN), pytest.param(LEVEL_3, CHILDREN, marks=pytest.mark.xfail(reason="May be some bug")), ], ids=[ "(CAN,ROLEDNACCESS)", "(CAN,USERDNACCESS)", "(CAN,GROUPDNACCESS)", "(CAN,LDAPURLACCESS)", "(CAN,ATTRNAMEACCESS)", "(LEVEL_0, OU_2)", "(LEVEL_1,ANCESTORS)", "(LEVEL_2,GRANDPARENTS)", "(LEVEL_4,OU_2)", "(LEVEL_4, ANCESTORS)", "(LEVEL_4,GRANDPARENTS)", "(LEVEL_4,PARENTS)", "(LEVEL_4,CHILDREN)", "(LEVEL_3, CHILDREN)" ]) def test_mod_see_also_positive(topo, _add_user, user, entry): """ Try to set seeAlso on entry with binding specific user, it will success as per the ACI. :id: 65745426-7a01-11e8-8ac2-8c16451d917b :parametrized: yes :setup: Standalone Instance :steps: 1. Add test entry 2. Add ACI 3. User should follow ACI role :expectedresults: 1. Entry should be added 2. Operation should succeed 3. Operation should succeed """ conn = UserAccount(topo.standalone, user).bind(PW_DM) > UserAccount(conn, entry).replace('seeAlso', 'cn=1') suites/acl/userattr_test.py:216: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:281: in replace self.set(key, value, action=ldap.MOD_REPLACE) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:447: in set return self._instance.modify_ext_s(self._dn, [(action, key, value)], /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:613: in modify_ext_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:764: in result3 resp_type, resp_data, resp_msgid, decoded_resp_ctrls, retoid, retval = self.result4( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:774: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:340: in _ldap_call reraise(exc_type, exc_value, exc_traceback) /usr/lib64/python3.9/site-packages/ldap/compat.py:46: in reraise raise exc_value _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99d048cd0> func = <built-in method result4 of LDAP object at 0x7ff99d05e2a0> args = (5, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None exc_type = None, exc_value = None, exc_traceback = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), 
self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E ldap.INSUFFICIENT_ACCESS: {'msgtype': 103, 'msgid': 5, 'result': 50, 'desc': 'Insufficient access', 'ctrls': [], 'info': "Insufficient 'write' privilege to the 'seeAlso' attribute of entry 'ou=children,ou=parents,ou=grandparents,ou=ancestors,ou=inheritance,dc=example,dc=com'.\n"} /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: INSUFFICIENT_ACCESS | |||
XFailed | suites/clu/dsidm_user_test.py::test_dsidm_user_get_dn | 0.09 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99e482dc0> create_test_user = None @pytest.mark.bz1893667 @pytest.mark.xfail(reason="Will fail because of bz1893667") @pytest.mark.skipif(ds_is_older("1.4.2"), reason="Not implemented") def test_dsidm_user_get_dn(topology_st, create_test_user): """ Test dsidm user get_dn option :id: 787bf278-87c3-402e-936e-6161799d098d :setup: Standalone instance :steps: 1. Run dsidm user get_dn for created user 2. Check the output content is correct :expectedresults: 1. Success 2. Success """ standalone = topology_st.standalone users = nsUserAccounts(standalone, DEFAULT_SUFFIX) test_user = users.get('test_user_1000') args = FakeArgs() args.dn = test_user.dn log.info('Empty the log file to prevent false data to check about user') topology_st.logcap.flush() log.info('Test dsidm user get_dn without json') > get_dn(standalone, DEFAULT_SUFFIX, topology_st.logcap.log, args) suites/clu/dsidm_user_test.py:214: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ /usr/local/lib/python3.9/site-packages/lib389/cli_idm/user.py:38: in get_dn _generic_get_dn(inst, basedn, log.getChild('_generic_get_dn'), MANY, dn, args) /usr/local/lib/python3.9/site-packages/lib389/cli_idm/__init__.py:101: in _generic_get_dn o = mc.get(dn=dn) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1144: in get results = self._get_dn(dn) /usr/local/lib/python3.9/site-packages/lib389/_mapped_object.py:1162: in _get_dn return self._instance.search_ext_s( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:863: in search_ext_s msgid = self.search_ext(base,scope,filterstr,attrlist,attrsonly,serverctrls,clientctrls,timeout,sizelimit) /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:853: in search_ext return self._ldap_call( /usr/local/lib/python3.9/site-packages/lib389/__init__.py:173: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv object at 0x7ff99e482e50> func = <built-in method search_ext of LDAP object at 0x7ff99e6df5d0> args = (<function get_dn.<locals>.<lambda> at 0x7ff99e6740d0>, 0, '(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))', ['dn'], 0, None, ...) 
kwargs = {}, diagnostic_message_success = None def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E TypeError: search_ext() argument 1 must be str, not function /usr/lib64/python3.9/site-packages/ldap/ldapobject.py:324: TypeError -------------------------------Captured log setup------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:210 Empty the log file to prevent false data to check about user INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:213 Test dsidm user get_dn without json -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
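The TypeError at the bottom is the essence of bz1893667: _generic_get_dn hands search_ext() a zero-argument lambda as the search base instead of the resolved DN string. A defensive normalization at that boundary, sketched here as an idea rather than the upstream patch:

    def resolve_basedn(basedn):
        # lib389's CLI layer sometimes passes the base DN lazily as a callable;
        # python-ldap's search_ext() requires str, so resolve it first.
        return basedn() if callable(basedn) else basedn

With that in place, a call site such as instance.search_ext_s(resolve_basedn(basedn), ...) accepts both calling styles.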
XFailed | suites/config/config_test.py::test_defaultnamingcontext_1 | 0.24 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99d18fd30> @pytest.mark.xfail(reason="This may fail due to bug 1610234") def test_defaultnamingcontext_1(topo): """This test case should be part of function test_defaultnamingcontext Please move it back after we have a fix for bug 1610234 """ log.info("Remove the original suffix which is currently nsslapd-defaultnamingcontext" "and check nsslapd-defaultnamingcontext become empty.") """ Please remove these declarations after moving the test to function test_defaultnamingcontext """ backends = Backends(topo.standalone) test_db2 = 'test2_db' test_suffix2 = 'dc=test2,dc=com' b2 = backends.create(properties={'cn': test_db2, 'nsslapd-suffix': test_suffix2}) b2.delete() > assert topo.standalone.config.get_attr_val_utf8('nsslapd-defaultnamingcontext') == ' ' E AssertionError: assert 'dc=example,dc=com' == ' ' E Strings contain only whitespace, escaping them using repr() E - ' ' E + 'dc=example,dc=com' suites/config/config_test.py:300: AssertionError -------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:288 Remove the original suffix which is currently nsslapd-defaultnamingcontextand check nsslapd-defaultnamingcontext become empty. | |||
XFailed | suites/export/export_test.py::test_dbtasks_db2ldif_with_non_accessible_ldif_file_path_output | 3.66 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99d1d22e0> @pytest.mark.bz1860291 @pytest.mark.xfail(reason="bug 1860291") @pytest.mark.skipif(ds_is_older("1.3.10", "1.4.2"), reason="Not implemented") def test_dbtasks_db2ldif_with_non_accessible_ldif_file_path_output(topo): """Export with db2ldif, giving a ldif file path which can't be accessed by the user (dirsrv by default) :id: fcc63387-e650-40a7-b643-baa68c190037 :setup: Standalone Instance - entries imported in the db :steps: 1. Stop the server 2. Launch db2ldif with a non accessible ldif file path 3. check the error reported in the command output :expected results: 1. Operation successful 2. Operation properly fails 3. An clear error message is reported as output of the cli """ export_ldif = '/tmp/nonexistent/export.ldif' log.info("Stopping the instance...") topo.standalone.stop() log.info("Performing an offline export to a non accessible ldif file path - should fail and output a clear error message") expected_output="No such file or directory" > run_db2ldif_and_clear_logs(topo, topo.standalone, DEFAULT_BENAME, export_ldif, expected_output) suites/export/export_test.py:150: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7ff99d1d22e0> instance = <lib389.DirSrv object at 0x7ff99ca651f0>, backend = 'userRoot' ldif = '/tmp/nonexistent/export.ldif', output_msg = 'No such file or directory' encrypt = False, repl = False def run_db2ldif_and_clear_logs(topology, instance, backend, ldif, output_msg, encrypt=False, repl=False): args = FakeArgs() args.instance = instance.serverid args.backend = backend args.encrypted = encrypt args.replication = repl args.ldif = ldif dbtasks_db2ldif(instance, topology.logcap.log, args) log.info('checking output msg') if not topology.logcap.contains(output_msg): log.error('The output message is not the expected one') > assert False E assert False suites/export/export_test.py:36: AssertionError ------------------------------Captured stderr call------------------------------ ldiffile: /tmp/nonexistent/export.ldif -------------------------------Captured log call-------------------------------- INFO lib389.utils:export_test.py:145 Stopping the instance... INFO lib389.utils:export_test.py:148 Performing an offline export to a non accessible ldif file path - should fail and output a clear error message CRITICAL LogCapture:dbtasks.py:41 db2ldif failed INFO lib389.utils:export_test.py:33 checking output msg ERROR lib389.utils:export_test.py:35 The output message is not the expected one | |||
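Bug 1860291 concerns the quality of the error message, but the scenario also shows the precondition worth checking before an offline export: db2ldif cannot create the LDIF when the target's parent directory is missing or unwritable. A cheap pre-check, sketched here (it tests the invoking user's access, not necessarily the dirsrv user's):

    import os

    def ldif_path_writable(path):
        # /tmp/nonexistent/export.ldif fails both checks: the parent
        # directory must exist and be writable for db2ldif to create the file.
        parent = os.path.dirname(path) or '.'
        return os.path.isdir(parent) and os.access(parent, os.W_OK)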
XFailed | suites/healthcheck/healthcheck_test.py::test_healthcheck_unable_to_query_backend | 1.22 | |
topology_st = <lib389.topologies.TopologyMain object at 0x7ff99cdb5be0> @pytest.mark.ds50873 @pytest.mark.bz1796343 @pytest.mark.skipif(ds_is_older("1.4.1"), reason="Not implemented") @pytest.mark.xfail(reason="Will fail because of bz1837315. Set proper version after bug is fixed") def test_healthcheck_unable_to_query_backend(topology_st): """Check if HealthCheck returns DSBLE0002 code :id: 716b1ff1-94bd-4780-98b8-96ff8ef21e30 :setup: Standalone instance :steps: 1. Create DS instance 2. Create a new root suffix and database 3. Disable new suffix 4. Use HealthCheck without --json option 5. Use HealthCheck with --json option :expectedresults: 1. Success 2. Success 3. Success 4. HealthCheck should return code DSBLE0002 5. HealthCheck should return code DSBLE0002 """ RET_CODE = 'DSBLE0002' NEW_SUFFIX = 'dc=test,dc=com' NEW_BACKEND = 'userData' standalone = topology_st.standalone log.info('Create new suffix') backends = Backends(standalone) backends.create(properties={ 'cn': NEW_BACKEND, 'nsslapd-suffix': NEW_SUFFIX, }) log.info('Disable the newly created suffix') mts = MappingTrees(standalone) mt_new = mts.get(NEW_SUFFIX) mt_new.replace('nsslapd-state', 'disabled') run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=True) log.info('Enable the suffix again and check if nothing is broken') mt_new.replace('nsslapd-state', 'backend') > run_healthcheck_and_flush_log(topology_st, standalone, RET_CODE, json=False) suites/healthcheck/healthcheck_test.py:453: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <lib389.topologies.TopologyMain object at 0x7ff99cdb5be0> instance = <lib389.DirSrv object at 0x7ff99cae9520>, searched_code = 'DSBLE0002' json = False, searched_code2 = None, list_checks = False, list_errors = False check = None, searched_list = None def run_healthcheck_and_flush_log(topology, instance, searched_code=None, json=False, searched_code2=None, list_checks=False, list_errors=False, check=None, searched_list=None): args = FakeArgs() args.instance = instance.serverid args.verbose = instance.verbose args.list_errors = list_errors args.list_checks = list_checks args.check = check args.dry_run = False args.json = json log.info('Use healthcheck with --json == {} option'.format(json)) health_check_run(instance, topology.logcap.log, args) if searched_list is not None: for item in searched_list: assert topology.logcap.contains(item) log.info('Healthcheck returned searched item: %s' % item) else: > assert topology.logcap.contains(searched_code) E AssertionError: assert False E + where False = <bound method LogCapture.contains of <LogCapture (NOTSET)>>('DSBLE0002') E + where <bound method LogCapture.contains of <LogCapture (NOTSET)>> = <LogCapture (NOTSET)>.contains E + where <LogCapture (NOTSET)> = <lib389.topologies.TopologyMain object at 0x7ff99cdb5be0>.logcap suites/healthcheck/healthcheck_test.py:49: AssertionError -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userdata:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userdata:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userdata:search ... 
INFO LogCapture:health.py:99 Checking backends:userdata:virt_attrs ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 4 Issues found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: backends:userdata:mappingtree INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- userdata INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the absence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o\3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. INFO LogCapture:health.py:45 [2] DS Lint Error: DSBLE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: backends:userdata:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=test,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 27, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Check the server's error and access logs for more information.
INFO LogCapture:health.py:45 [3] DS Lint Error: DSBLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: backends:userdata:mappingtree INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- userdata INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the absence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o\3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. INFO LogCapture:health.py:45 [4] DS Lint Error: DSBLE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: backends:userdata:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=test,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 27, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Check the server's error and access logs for more information. INFO LogCapture:health.py:124 ===== End Of Report (4 Issues found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userdata" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabsence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o\\3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userdata:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=test,dc=com" ], "detail": "Unable to query the backend. 
LDAP error ({'msgtype': 101, 'msgid': 27, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userdata:search" }, { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userdata" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabsence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o\\3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userdata:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=test,dc=com" ], "detail": "Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 27, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userdata:search" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userdata:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userdata:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userdata:search ... INFO LogCapture:health.py:99 Checking backends:userdata:virt_attrs ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 2 Issues found! Generating report ...
INFO LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:userdata:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=test,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The backend database has not been initialized yet INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:userdata:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=test,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The backend database has not been initialized yet INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== | |||
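Note on the failure above: the assertion fires on the third healthcheck run, after the mapping tree has been set back to 'backend'. At that point the report contains DSBLE0003 (uninitialized database) rather than the searched DSBLE0002, which is the behaviour tracked by bz1837315. For reference, the disable/enable cycle the test performs can be reproduced with the same lib389 calls that appear in the traceback (Backends, MappingTrees); a minimal sketch, assuming a running standalone instance and lib389's usual import paths:
    from lib389.backend import Backends          # import paths assumed from lib389's layout
    from lib389.mappingTree import MappingTrees

    NEW_SUFFIX = 'dc=test,dc=com'

    def toggle_new_suffix(standalone):
        # Create a new root suffix and database, as in the test setup
        Backends(standalone).create(properties={
            'cn': 'userData',
            'nsslapd-suffix': NEW_SUFFIX,
        })
        mt = MappingTrees(standalone).get(NEW_SUFFIX)
        # Disabling the mapping tree makes searches under the suffix fail with
        # "Operation attempted on a disabled node", reported as DSBLE0002
        mt.replace('nsslapd-state', 'disabled')
        # Switching back to 'backend' routes operations to the database again;
        # with no data ever imported, healthcheck then reports DSBLE0003
        mt.replace('nsslapd-state', 'backend')
Running the healthcheck between the two replace calls (the suite uses health_check_run with FakeArgs, as shown in the traceback) reproduces the DSBLE0002 and DSBLE0003 reports in turn.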
XFailed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_memberof_groups | 9.41 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoSuppliers object at 0x7ff99c655fd0> topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99ba3aeb0> base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7ff99c643f10> def test_memberof_groups(self, topology_m2, base_m2): """Check that conflicts are properly resolved for operations with memberOf and groups :id: 77f09b18-03d1-45da-940b-1ad2c2908eb3 :setup: Two supplier replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Enable memberOf plugin 2. Add 30 users to m1 and wait for replication to happen 3. Pause replication 4. Create a group on m1 and m2 5. Create a group on m1 and m2, delete from m1 6. Create a group on m1, delete from m1, and create on m2 7. Create a group on m2 and m1, delete from m1 8. Create two different groups on m2 9. Resume replication 10. Check that the entries on both suppliers are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass 9. It should pass 10. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:403: XFailed | | | |
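The TestTwoSuppliers cases in this suite all follow the pause/modify/resume pattern listed in the steps above. A minimal sketch of one conflicting-ADD scenario, assuming lib389's pause_all_replicas()/resume_all_replicas() topology helpers and the Groups idm wrapper (names taken from lib389, not from this test's source):
    from lib389._constants import DEFAULT_SUFFIX  # constant name assumed
    from lib389.idm.group import Groups

    def add_conflicting_group(topology_m2):
        m1 = topology_m2.ms['supplier1']
        m2 = topology_m2.ms['supplier2']
        # With replication paused, both suppliers accept the same ADD locally
        topology_m2.pause_all_replicas()
        for inst in (m1, m2):
            Groups(inst, DEFAULT_SUFFIX).create(properties={'cn': 'conflict_group'})
        # On resume the servers must reconcile the duplicate entries; the losing
        # entry is renamed into a replication conflict (nsds5ReplConflict) entry
        topology_m2.resume_all_replicas()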
XFailed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_managed_entries | 0.09 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoSuppliers object at 0x7ff99c6434c0> topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99ba3aeb0> def test_managed_entries(self, topology_m2): """Check that conflicts are properly resolved for operations with managed entries :id: 77f09b18-03d1-45da-940b-1ad2c2908eb4 :setup: Two supplier replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Create ou=managed_users and ou=managed_groups under test container 2. Configure managed entries plugin and add a template to test container 3. Add a user to m1 and wait for replication to happen 4. Pause replication 5. Create a user on m1 and m2 with the same group ID on both suppliers 6. Create a user on m1 and m2 with a different group ID on both suppliers 7. Resume replication 8. Check that the entries on both suppliers are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:494: XFailed | | | |
XFailed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_nested_entries_with_children | 9.36 | |
self = <tests.suites.replication.conflict_resolve_test.TestTwoSuppliers object at 0x7ff99b859cd0> topology_m2 = <lib389.topologies.TopologyMain object at 0x7ff99ba3aeb0> base_m2 = <lib389.idm.nscontainer.nsContainer object at 0x7ff99b859100> def test_nested_entries_with_children(self, topology_m2, base_m2): """Check that conflicts are properly resolved for operations with nested entries with children :id: 77f09b18-03d1-45da-940b-1ad2c2908eb5 :setup: Two supplier replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Add 15 containers to m1 and wait for replication to happen 2. Pause replication 3. Create parent-child on supplier2 and supplier1 4. Create parent-child on supplier1 and supplier2 5. Create parent-child on supplier1 and supplier2 different child rdn 6. Create parent-child on supplier1 and delete parent on supplier2 7. Create parent on supplier1, delete it and parent-child on supplier2, delete them 8. Create parent on supplier1, delete it and parent-two children on supplier2 9. Create parent-two children on supplier1 and parent-child on supplier2, delete them 10. Create three subsets inside an existing container entry, applying only part of the changes on m2 11. Create more combinations of the subset with parent-child on m1 and parent on m2 12. Delete container on m1, modify user1 on m1, create parent on m2 and modify user2 on m2 13. Resume replication 14. Check that the entries on both suppliers are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass 9. It should pass 10. It should pass 11. It should pass 12. It should pass 13. It should pass 14. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:585: XFailed | | | |
XFailed | suites/replication/conflict_resolve_test.py::TestThreeSuppliers::test_nested_entries | 64.49 | |
self = <tests.suites.replication.conflict_resolve_test.TestThreeSuppliers object at 0x7ff99c4d0340> topology_m3 = <lib389.topologies.TopologyMain object at 0x7ff99b86d910> base_m3 = <lib389.idm.nscontainer.nsContainer object at 0x7ff99b86d520> def test_nested_entries(self, topology_m3, base_m3): """Check that conflicts are properly resolved for operations with nested entries with children :id: 77f09b18-03d1-45da-940b-1ad2c2908eb6 :setup: Three supplier replication, test container for entries, enable plugin logging, audit log, error log for replica and access log for internal :steps: 1. Add 15 containers to m1 and wait for replication to happen 2. Pause replication 3. Create two child entries under each of two entries 4. Create three child entries under each of three entries 5. Create two parents on m1 and m2, then on m1 - create a child and delete one parent, on m2 - delete one parent and create a child 6. Test a few more parent-child combinations with three instances 7. Resume replication 8. Check that the entries on all suppliers are the same and replication is working :expectedresults: 1. It should pass 2. It should pass 3. It should pass 4. It should pass 5. It should pass 6. It should pass 7. It should pass 8. It should pass """ > pytest.xfail("Issue 49591 - work in progress") E _pytest.outcomes.XFailed: Issue 49591 - work in progress suites/replication/conflict_resolve_test.py:969: XFailed -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8cdfbbf4-5c46-476e-b526-973a98a362d4 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 73988e07-e51e-4c56-bf3e-8d51cb183640 / got description=8cdfbbf4-5c46-476e-b526-973a98a362d4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 81930d1e-af17-4e2e-abee-bc51bb136eae / got description=73988e07-e51e-4c56-bf3e-8d51cb183640) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 81930d1e-af17-4e2e-abee-bc51bb136eae / got description=73988e07-e51e-4c56-bf3e-8d51cb183640) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 81930d1e-af17-4e2e-abee-bc51bb136eae / got description=73988e07-e51e-4c56-bf3e-8d51cb183640) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 81930d1e-af17-4e2e-abee-bc51bb136eae / got description=73988e07-e51e-4c56-bf3e-8d51cb183640) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a8af25ee-f736-48eb-9a41-0e36f89e971e / got description=81930d1e-af17-4e2e-abee-bc51bb136eae) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created | | | |
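The Retry/SUCCESS pairs in the setup log above come from a replication probe that writes a fresh UUID into an entry's description attribute on the sending supplier and polls the receiving supplier until it appears; the "expect ... / got description=..." text shows the two values being compared. A rough sketch of that polling pattern, with hypothetical entry objects, not lib389's exact implementation:
    import time
    import uuid

    def wait_for_replication(src_entry, dst_entry, timeout=60):
        """Poll until a marker written on one supplier is visible on another."""
        magic = str(uuid.uuid4())
        src_entry.replace('description', magic)          # write on the sender
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            # read back on the receiver; logs print this as "got description=..."
            if dst_entry.get_attr_val_utf8('description') == magic:
                return True
            time.sleep(2)                                # "Retry: ... NOT working"
        return False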
XFailed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaPort-0-65535-9999999999999999999999999999999999999999999999999999999999999999999-invalid-389] | 0.13 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaPort', too_small = '0', too_big = '65535' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '389' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_add(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf94 :parametrized: yes :setup: standalone instance :steps: 1. Use a value that is too small 2. Use a value that is too big 3. Use a value that overflows the int 4. Use a value with character value (not a number) 5. Use a valid value :expectedresults: 1. Add is rejected 2. Add is rejected 3. Add is rejected 4. Add is rejected 5. Add is allowed """ agmt_reset(topo) replica = replica_setup(topo) agmts = Agreements(topo.standalone, basedn=replica.dn) # Test too small perform_invalid_create(agmts, agmt_dict, attr, too_small) # Test too big > perform_invalid_create(agmts, agmt_dict, attr, too_big) suites/replication/replica_config_test.py:217: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ many = <lib389.agreement.Agreements object at 0x7ff99b9b38b0> properties = {'cn': 'test_agreement', 'nsDS5ReplicaBindDN': 'uid=tester', 'nsDS5ReplicaBindMethod': 'SIMPLE', 'nsDS5ReplicaHost': 'localhost.localdomain', ...} attr = 'nsds5ReplicaPort', value = '65535' def perform_invalid_create(many, properties, attr, value): my_properties = copy.deepcopy(properties) my_properties[attr] = value with pytest.raises(ldap.LDAPError) as ei: > many.create(properties=my_properties) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:108: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaPort-0-65535-9999999999999999999999999999999999999999999999999999999999999999999-invalid-389] | 0.22 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaPort', too_small = '0', too_big = '65535' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '389' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small > perform_invalid_modify(agmt, attr, too_small) suites/replication/replica_config_test.py:253: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b9b3d00> attr = 'nsds5ReplicaPort', value = '0' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.23 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaTimeout', too_small = '-1', too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b8cd7f0> attr = 'nsds5ReplicaTimeout', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaBusyWaitTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.23 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaBusyWaitTime', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b8e7d90> attr = 'nsds5ReplicaBusyWaitTime', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaSessionPauseTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.24 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaSessionPauseTime', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b8ba9d0> attr = 'nsds5ReplicaSessionPauseTime', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaFlowControlWindow--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.26 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaFlowControlWindow', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b985700> attr = 'nsds5ReplicaFlowControlWindow', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaFlowControlPause--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.23 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaFlowControlPause', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b9ad6a0> attr = 'nsds5ReplicaFlowControlPause', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
XFailed | suites/replication/replica_config_test.py::test_agmt_num_modify[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.24 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99b711190> attr = 'nsds5ReplicaProtocolTimeout', too_small = '-1' too_big = '9223372036854775807' overflow = '9999999999999999999999999999999999999999999999999999999999999999999' notnum = 'invalid', valid = '6' @pytest.mark.xfail(reason="Agreement validation currently does not work.") @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid", agmt_attrs) def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid): """Test all the number values you can set for a replica config entry :id: a8b47d4a-a089-4d70-8070-e6181209bf95 :parametrized: yes :setup: standalone instance :steps: 1. Replace a value that is too small 2. Replace a value that is too big 3. Replace a value that overflows the int 4. Replace a value with character value (not a number) 5. Replace a value with a valid value :expectedresults: 1. Value is rejected 2. Value is rejected 3. Value is rejected 4. Value is rejected 5. Value is allowed """ agmt = agmt_setup(topo) # Value too small perform_invalid_modify(agmt, attr, too_small) # Value too big > perform_invalid_modify(agmt, attr, too_big) suites/replication/replica_config_test.py:255: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ o = <lib389.agreement.Agreement object at 0x7ff99b7d7eb0> attr = 'nsds5ReplicaProtocolTimeout', value = '9223372036854775807' def perform_invalid_modify(o, attr, value): with pytest.raises(ldap.LDAPError) as ei: > o.replace(attr, value) E Failed: DID NOT RAISE <class 'ldap.LDAPError'> suites/replication/replica_config_test.py:113: Failed | | | |
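All of the test_agmt_num_add/test_agmt_num_modify IDs above are generated from one shared parameter table, whose columns are visible in the reported fixture values (attr, too_small, too_big, overflow, notnum, valid). A sketch of how such a table drives pytest's parametrization; the two rows shown are reconstructed from the IDs above, and '9' * 67 stands in for the long overflow literal:
    import pytest

    # One row per numeric agreement attribute:
    # (attr, too_small, too_big, overflow, notnum, valid)
    agmt_attrs = [
        ('nsds5ReplicaPort', '0', '65535', '9' * 67, 'invalid', '389'),
        ('nsds5ReplicaTimeout', '-1', '9223372036854775807', '9' * 67, 'invalid', '6'),
        # ... the remaining attributes seen above follow the same shape
    ]

    @pytest.mark.parametrize("attr, too_small, too_big, overflow, notnum, valid",
                             agmt_attrs)
    def test_agmt_num_modify(topo, attr, too_small, too_big, overflow, notnum, valid):
        ...
Each tuple becomes one bracketed test ID, which is why every xfail/xpass above carries the full attr-too_small-too_big-overflow-notnum-valid string in its name.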
XFailed | suites/replication/ruvstore_test.py::test_memoryruv_sync_with_databaseruv | 3.06 | |
topo = <lib389.topologies.TopologyMain object at 0x7ff99c3ab4c0> @pytest.mark.xfail(reason="No method to safely access DB ruv currently exists online.") def test_memoryruv_sync_with_databaseruv(topo): """Check if memory ruv and database ruv are synced :id: 5f38ac5f-6353-460d-bf60-49cafffda5b3 :setup: Replication with two suppliers. :steps: 1. Add user to server and compare memory ruv and database ruv. 2. Modify description of user and compare memory ruv and database ruv. 3. Modrdn of user and compare memory ruv and database ruv. 4. Delete user and compare memory ruv and database ruv. :expectedresults: 1. For add user, the memory ruv and database ruv should be the same. 2. For modify operation, the memory ruv and database ruv should be the same. 3. For modrdn operation, the memory ruv and database ruv should be the same. 4. For delete operation, the memory ruv and database ruv should be the same. """ log.info('Adding user: {} to supplier1'.format(TEST_ENTRY_NAME)) users = UserAccounts(topo.ms['supplier1'], DEFAULT_SUFFIX) tuser = users.create(properties=USER_PROPERTIES) > _compare_memoryruv_and_databaseruv(topo, 'add') suites/replication/ruvstore_test.py:139: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topo = <lib389.topologies.TopologyMain object at 0x7ff99c3ab4c0> operation_type = 'add' def _compare_memoryruv_and_databaseruv(topo, operation_type): """Compare the memoryruv and databaseruv for ldap operations""" log.info('Checking memory ruv for ldap: {} operation'.format(operation_type)) replicas = Replicas(topo.ms['supplier1']) replica = replicas.list()[0] memory_ruv = replica.get_attr_val_utf8('nsds50ruv') log.info('Checking database ruv for ldap: {} operation'.format(operation_type)) > entry = replicas.get_ruv_entry(DEFAULT_SUFFIX) E AttributeError: 'Replicas' object has no attribute 'get_ruv_entry' suites/replication/ruvstore_test.py:81: AttributeError -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:136 Adding user: rep2lusr to supplier1 INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:75 Checking memory ruv for ldap: add operation INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:80 Checking database ruv for ldap: add operation | | | |
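The xfail reason is borne out by the AttributeError: Replicas.get_ruv_entry has been removed, so the test has no safe online accessor for the database RUV. For reference, the on-disk RUV is kept in a reserved tombstone entry under the suffix; a hedged sketch of reading it with a plain search (the nsuniqueid value and filter are long-standing 389-ds conventions, but verify them against your server version):
    import ldap
    from lib389._constants import DEFAULT_SUFFIX

    # The database RUV is held in a reserved tombstone entry under the suffix
    RUV_FILTER = ('(&(nsuniqueid=ffffffff-ffffffff-ffffffff-ffffffff)'
                  '(objectclass=nstombstone))')

    def read_database_ruv(instance, suffix=DEFAULT_SUFFIX):
        entries = instance.search_s(suffix, ldap.SCOPE_SUBTREE,
                                    RUV_FILTER, ['nsds50ruv'])
        return [v.decode() for v in entries[0][1]['nsds50ruv']]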
XPassed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_Use_double_equal_instead_of_equal_in_the_targetattr] | 0.27 | |
No log output captured. | |||
XPassed | suites/acl/syntax_test.py::test_aci_invalid_syntax_fail[test_Use_double_equal_instead_of_equal_in_the_targetfilter] | 0.08 | |
No log output captured. | |||
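These two unexpected passes mean the server accepted ACIs that use '==' after targetattr/targetfilter, which the suite treats as invalid syntax that should be rejected. An illustration of the difference (attribute names are placeholders, not the test's exact ACIs):
    # A well-formed ACI uses a single '=' after the targetattr keyword:
    VALID_ACI = ('(targetattr="cn")(version 3.0; acl "ok"; '
                 'allow (read, search) userdn="ldap:///anyone";)')
    # The xpassed cases submit '==' variants, expecting a syntax error
    # that the server does not currently raise:
    INVALID_TARGETATTR = ('(targetattr=="cn")(version 3.0; acl "bad"; '
                          'allow (read, search) userdn="ldap:///anyone";)')
    INVALID_TARGETFILTER = ('(targetfilter=="(cn=*)")(version 3.0; acl "bad"; '
                            'allow (read, search) userdn="ldap:///anyone";)')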
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.25 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaBusyWaitTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.27 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaSessionPauseTime--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.26 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaFlowControlWindow--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.25 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaFlowControlPause--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.29 | |
No log output captured. | |||
XPassed | suites/replication/replica_config_test.py::test_agmt_num_add[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.27 | |
No log output captured. | |||
Skipped | suites/config/regression_test.py::test_set_cachememsize_to_custom_value | 0.00 | |
('suites/config/regression_test.py', 34, 'Skipped: available memory is too low') | |||
Skipped | suites/ds_logs/ds_logs_test.py::test_etime_at_border_of_second | 0.00 | |
('suites/ds_logs/ds_logs_test.py', 736, 'Skipped: rsearch was removed') | |||
Skipped | suites/memory_leaks/MMR_double_free_test.py::test_MMR_double_free | 0.00 | |
('suites/memory_leaks/MMR_double_free_test.py', 67, "Skipped: Don't run if ASAN is not enabled") | |||
Skipped | suites/memory_leaks/range_search_test.py::test_range_search | 0.00 | |
('suites/memory_leaks/range_search_test.py', 24, "Skipped: Don't run if ASAN is not enabled") | |||
Skipped | suites/migration/export_data_test.py::test_export_data_from_source_host | 0.00 | |
('suites/migration/export_data_test.py', 24, 'Skipped: This test is meant to execute in specific test environment') | |||
Skipped | suites/migration/import_data_test.py::test_import_data_to_target_host | 0.00 | |
('suites/migration/import_data_test.py', 24, 'Skipped: This test is meant to execute in specific test environment') | |||
Skipped | suites/monitor/db_locks_monitor_test.py::test_exhaust_db_locks_basic[70] | 0.00 | |
('suites/monitor/db_locks_monitor_test.py', 140, 'Skipped: DB locks monitoring tests may take hours if the feature is not present or another failure exists. Also, the feature requires a large amount of space as we set nsslapd-db-locks to 1300000.') | | | |
Skipped | suites/monitor/db_locks_monitor_test.py::test_exhaust_db_locks_basic[80] | 0.00 | |
('suites/monitor/db_locks_monitor_test.py', 140, 'Skipped: DB locks monitoring tests may take hours if the feature is not present or another failure exists. Also, the feature requires a large amount of space as we set nsslapd-db-locks to 1300000.') | | | |
Skipped | suites/monitor/db_locks_monitor_test.py::test_exhaust_db_locks_basic[95] | 0.00 | |
('suites/monitor/db_locks_monitor_test.py', 140, 'Skipped: DB locks monitoring tests may take hours if the feature is not present or another failure exists. Also, the feature requires a large amount of space as we set nsslapd-db-locks to 1300000.') | | | |
Skipped | suites/monitor/db_locks_monitor_test.py::test_exhaust_db_locks_big_pause | 0.00 | |
('suites/monitor/db_locks_monitor_test.py', 207, 'Skipped: DB locks monitoring tests may take hours if the feature is not present or another failure exists. Also, the feature requires a large amount of space as we set nsslapd-db-locks to 1300000.') | | | |
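The skip reasons above mention raising nsslapd-db-locks to 1300000; that attribute lives on the ldbm database plugin config entry. A minimal sketch of setting it with lib389, assuming a BDB backend and that ensure_bytes is available from lib389.utils; the server typically needs a restart before the new lock-table size takes effect:
    import ldap
    from lib389.utils import ensure_bytes  # helper assumed from lib389.utils

    LDBM_CONFIG = 'cn=config,cn=ldbm database,cn=plugins,cn=config'

    def set_db_locks(standalone, locks='1300000'):
        # nsslapd-db-locks sizes the BDB lock table
        standalone.modify_s(LDBM_CONFIG,
                            [(ldap.MOD_REPLACE, 'nsslapd-db-locks',
                              ensure_bytes(locks))])
        standalone.restart()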
Skipped | suites/replication/changelog_test.py::test_cldump_files_removed | 0.00 | |
('suites/replication/changelog_test.py', 236, 'Skipped: does not work for prefix builds') | |||
Skipped | suites/replication/changelog_test.py::test_changelog_compactdbinterval | 0.00 | |
('suites/replication/changelog_test.py', 631, 'Skipped: changelog compaction is done by the backend itself, with id2entry as well; nsslapd-changelogcompactdb-interval is no longer supported') | | | |
Skipped | suites/rewriters/adfilter_test.py::test_adfilter_objectSid | 0.92 | |
('suites/rewriters/adfilter_test.py', 90, 'Skipped: It is missing samba python bindings') | |||
Skipped | tickets/ticket47815_test.py::test_ticket47815 | 0.00 | |
('tickets/ticket47815_test.py', 26, 'Skipped: Not implemented, or invalid by nsMemberOf') | |||
Skipped | tickets/ticket49121_test.py::test_ticket49121 | 0.00 | |
('tickets/ticket49121_test.py', 32, "Skipped: Don't run if ASAN is not enabled") | |||
Passed | suites/acl/aci_excl_filter_test.py::test_aci_with_exclude_filter | 9.41 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:96 Create an OU for them INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:98 Create top org users INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:100 Add aci which contains extensible filter. INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:105 Add aci which contains extensible filter. INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:114 Adding OU entries ... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:117 Adding "ou" : OU0 under "dn" : dc=example,dc=com... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:47 Organisation OU0 created for ou :ou=OU0,dc=example,dc=com . INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:120 Adding outest under ou=OU0,dc=example,dc=com... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:47 Organisation outest created for ou :ou=outest,ou=OU0,dc=example,dc=com . INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:117 Adding "ou" : OU1 under "dn" : dc=example,dc=com... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:47 Organisation OU1 created for ou :ou=OU1,dc=example,dc=com . INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:120 Adding outest under ou=OU1,dc=example,dc=com... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:47 Organisation outest created for ou :ou=outest,ou=OU1,dc=example,dc=com . INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:128 Adding User: admin0 under ou=outest,ou=OU0,dc=example,dc=com... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:65 User created for dn :ou=admin0,ou=outest,ou=OU0,dc=example,dc=com . INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:128 Adding User: admin01 under ou=outest,ou=OU1,dc=example,dc=com... INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:65 User created for dn :ou=admin01,ou=outest,ou=OU1,dc=example,dc=com . INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:131 Bind as user admin01 INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:141 Verify 2 Entries returned for cn (cn=admin01) INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:145 Verified the entries do not contain 'telephonenumber' INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:145 Verified the entries do not contain 'telephonenumber' INFO tests.suites.acl.aci_excl_filter_test:aci_excl_filter_test.py:146 Test complete | | | |
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_EQ_ACI)] | 9.16 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_PRES_ACI)] | 0.60 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, REAL_SUB_ACI)] | 0.85 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, ROLE_PRES_ACI)] | 0.49 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, ROLE_SUB_ACI)] | 0.38 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_EQ_ACI)] | 0.49 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_PRES_ACI)] | 0.34 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, COS_SUB_ACI)] | 0.60 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_positive[(ENG_USER, ENG_MANAGER, LDAPURL_ACI)] | 0.67 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, REAL_EQ_ACI)] | 0.39 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_OU, REAL_PRES_ACI)] | 0.38 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, REAL_SUB_ACI)] | 0.38 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_EQ_ACI)] | 0.37 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_PRES_ACI)] | 0.38 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, ROLE_SUB_ACI)] | 0.36 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_EQ_ACI)] | 0.35 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_PRES_ACI)] | 0.36 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, SALES_MANAGER, COS_SUB_ACI)] | 0.34 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(SALES_UESER, SALES_MANAGER, LDAPURL_ACI)] | 0.38 | |
No log output captured. | |||
Passed | suites/acl/acivattr_test.py::test_negative[(ENG_USER, ENG_MANAGER, ROLE_EQ_ACI)] | 2.92 | |
No log output captured. | |||
Passed | suites/acl/acl_deny_test.py::test_multi_deny_aci | 24.49 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389:acl_deny_test.py:47 Add uid=tuser1,ou=People,dc=example,dc=com INFO lib389:acl_deny_test.py:58 Add uid=tuser,ou=People,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO lib389:acl_deny_test.py:90 Pass 1 INFO lib389:acl_deny_test.py:93 Testing two searches behave the same... INFO lib389:acl_deny_test.py:136 Testing search does not return any entries... INFO lib389:acl_deny_test.py:90 Pass 2 INFO lib389:acl_deny_test.py:93 Testing two searches behave the same... INFO lib389:acl_deny_test.py:136 Testing search does not return any entries... INFO lib389:acl_deny_test.py:200 Test PASSED | |||
Passed | suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[lang-ja] | 22.76 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2c7a148c-581d-4e3a-a2a7-222672b5e77d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 75218ec2-c6e9-4cee-952f-8f71e4373b7d / got description=2c7a148c-581d-4e3a-a2a7-222672b5e77d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'lang-ja' subtype======== INFO tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute INFO tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute INFO tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype -------------------------------Captured log call-------------------------------- INFO tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute INFO tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found -----------------------------Captured log teardown------------------------------ INFO tests.suites.acl.acl_test:acl_test.py:92 Finally, delete an ACI with the 'lang-ja' subtype | |||
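The lang-ja, binary, and phonetic parameters exercise ACIs whose targetattr names an attribute subtype. A hedged example of such an ACI string (attribute and subtype chosen for illustration, not taken from the test source):
    # targetattr can carry an attribute subtype such as description;lang-ja
    SUBTYPE_ACI = ('(targetattr="description;lang-ja")(version 3.0; '
                   'acl "subtype"; allow (read, search) '
                   'userdn="ldap:///all";)')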
Passed | suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[binary] | 0.26 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'binary' subtype======== INFO tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute INFO tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute INFO tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype -------------------------------Captured log call-------------------------------- INFO tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute INFO tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found -----------------------------Captured log teardown------------------------------ INFO tests.suites.acl.acl_test:acl_test.py:92 Finally, delete an ACI with the 'binary' subtype | |||
Passed | suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[phonetic] | 0.31 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.acl.acl_test:acl_test.py:76 ========Executing test with 'phonetic' subtype======== INFO tests.suites.acl.acl_test:acl_test.py:77 Add a target attribute INFO tests.suites.acl.acl_test:acl_test.py:80 Add a user attribute INFO tests.suites.acl.acl_test:acl_test.py:88 Add an ACI with attribute subtype -------------------------------Captured log call-------------------------------- INFO tests.suites.acl.acl_test:acl_test.py:118 Search for the added attribute INFO tests.suites.acl.acl_test:acl_test.py:125 The added attribute was found -----------------------------Captured log teardown------------------------------ INFO tests.suites.acl.acl_test:acl_test.py:92 Finally, delete an ACI with the 'phonetic' subtype | |||
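Each of the three parametrized runs above adds an ACI whose targetattr carries an attribute subtype ('lang-ja', 'binary', 'phonetic'). A hedged sketch of the shape of such an ACI; the attribute name, permissions, and credentials here are illustrative, not copied from acl_test.py:

import ldap

# targetattr with a subtype, one of the three values parametrized above.
ACI_SUBTYPE = ('(targetattr = "cn;lang-ja")'
               '(version 3.0; acl "aci with subtype"; '
               'allow (read, search) userdn = "ldap:///anyone";)')

conn = ldap.initialize('ldap://localhost:39001')
conn.simple_bind_s('cn=Directory Manager', 'password')  # hypothetical credentials

# Add the ACI to the suffix entry; the teardown step then deletes it again.
conn.modify_s('dc=example,dc=com', [(ldap.MOD_ADD, 'aci', ACI_SUBTYPE.encode())])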
Passed | suites/acl/acl_test.py::test_mode_default_add_deny | 0.56 | |
-------------------------------Captured log setup------------------------------- INFO lib389:acl_test.py:233 ######## INITIALIZATION ######## INFO lib389:acl_test.py:236 Add uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:254 Add cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:258 Add cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:262 Add cn=excepts,cn=accounts,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:294 ######## mode moddn_aci : ADD (should fail) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:302 Try to add cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:311 Exception (expected): INSUFFICIENT_ACCESS | |||
Passed | suites/acl/acl_test.py::test_mode_default_delete_deny | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:329 ######## DELETE (should fail) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:336 Try to delete cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:341 Exception (expected): INSUFFICIENT_ACCESS | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[0-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] | 0.22 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (0) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account0,cn=staged user,dc=example,dc=com -> uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account0,cn=staged user,dc=example,dc=com -> uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[1-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] | 0.30 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (1) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account1,cn=staged user,dc=example,dc=com -> uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account1,cn=staged user,dc=example,dc=com -> uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[2-cn=staged user,dc=example,dc=com-cn=bad*,dc=example,dc=com-True] | 0.27 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (2) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account2,cn=staged user,dc=example,dc=com -> uid=new_account2,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account2,cn=staged user,dc=example,dc=com -> uid=new_account2,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:409 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[3-cn=st*,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] | 0.27 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (3) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account3,cn=staged user,dc=example,dc=com -> uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account3,cn=staged user,dc=example,dc=com -> uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[4-cn=bad*,dc=example,dc=com-cn=accounts,dc=example,dc=com-True] | 0.22 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (4) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account4,cn=staged user,dc=example,dc=com -> uid=new_account4,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account4,cn=staged user,dc=example,dc=com -> uid=new_account4,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:409 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[5-cn=st*,dc=example,dc=com-cn=ac*,dc=example,dc=com-False] | 0.22 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (5) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account5,cn=staged user,dc=example,dc=com -> uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account5,cn=staged user,dc=example,dc=com -> uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[6-None-cn=ac*,dc=example,dc=com-False] | 0.26 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (6) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account6,cn=staged user,dc=example,dc=com -> uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account6,cn=staged user,dc=example,dc=com -> uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[7-cn=st*,dc=example,dc=com-None-False] | 0.31 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (7) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account7,cn=staged user,dc=example,dc=com -> uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account7,cn=staged user,dc=example,dc=com -> uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod[8-None-None-False] | 0.26 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:376 ######## MOVE staging -> Prod (8) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:388 Try to MODDN uid=new_account8,cn=staged user,dc=example,dc=com -> uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:395 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:399 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:406 Try to MODDN uid=new_account8,cn=staged user,dc=example,dc=com -> uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod_9 | 0.76 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:453 ######## MOVE staging -> Prod (9) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:466 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:473 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:479 Disable the moddn right INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:484 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:492 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:499 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:521 Try to MODDN uid=new_account9,cn=staged user,dc=example,dc=com -> uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:532 Enable the moddn right INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:536 ######## MOVE staging -> Prod (10) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:548 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:555 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:572 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:579 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:588 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:594 Try to MODDN uid=new_account10,cn=staged user,dc=example,dc=com -> uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_moddn_prod_staging | 0.35 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:623 ######## MOVE staging -> Prod (11) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:636 Try to MODDN uid=new_account11,cn=staged user,dc=example,dc=com -> uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:643 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:647 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:653 Try to MODDN uid=new_account11,cn=staged user,dc=example,dc=com -> uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:668 Try to move back MODDN uid=new_account11,cn=accounts,dc=example,dc=com -> uid=new_account11,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:675 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
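The MODDN attempts logged throughout these staging-to-production tests are ordinary LDAP rename operations. A minimal sketch with python-ldap, assuming the bind DN and entries shown in the logs (the port and password are hypothetical):

import ldap

conn = ldap.initialize('ldap://localhost:39001')
conn.simple_bind_s('uid=bind_entry,dc=example,dc=com', 'password')  # hypothetical password

try:
    # MODDN: keep the RDN, move the entry from the staging to the production subtree.
    conn.rename_s('uid=new_account0,cn=staged user,dc=example,dc=com',
                  'uid=new_account0',
                  newsuperior='cn=accounts,dc=example,dc=com')
except ldap.INSUFFICIENT_ACCESS:
    # Expected whenever no ACI grants bind_entry the moddn right on these subtrees.
    pass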
Passed | suites/acl/acl_test.py::test_check_repl_M2_to_M1 | 1.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:705 Bind as cn=Directory Manager (M2) INFO lib389:acl_test.py:725 Update (M2) uid=new_account12,cn=staged user,dc=example,dc=com (description) INFO lib389:acl_test.py:738 Update uid=new_account12,cn=staged user,dc=example,dc=com (description) replicated on M1 | |||
Passed | suites/acl/acl_test.py::test_moddn_staging_prod_except | 0.24 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:763 ######## MOVE staging -> Prod (13) ######## INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:775 Try to MODDN uid=new_account13,cn=staged user,dc=example,dc=com -> uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:782 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:786 ######## MOVE to and from equality filter ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:160 Add a DENY aci under cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:793 Try to MODDN uid=new_account13,cn=staged user,dc=example,dc=com -> uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:799 ######## MOVE staging -> Prod/Except (14) ######## INFO lib389:acl_test.py:805 Try to MODDN uid=new_account14,cn=staged user,dc=example,dc=com -> uid=new_account14,cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:812 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:160 Add a DENY aci under cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_mode_default_ger_no_moddn | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:839 ######## mode moddn_aci : GER no moddn ######## INFO lib389:acl_test.py:850 dn: cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:850 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:853 ######## entryLevelRights: b'v' | |||
Passed | suites/acl/acl_test.py::test_mode_default_ger_with_moddn | 0.20 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:877 ######## mode moddn_aci: GER with moddn ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:895 dn: cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:895 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:898 ######## entryLevelRights: b'vn' INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn1 | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:928 ######## Disable the moddn aci mod ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:932 ######## mode legacy 1: GER no moddn ######## INFO lib389:acl_test.py:942 dn: cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:942 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:945 ######## entryLevelRights: b'v' | |||
Passed | suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn2 | 0.17 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:971 ######## Disable the moddn aci mod ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:975 ######## mode legacy 2: GER no moddn ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:992 dn: cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:992 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:995 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com | |||
Passed | suites/acl/acl_test.py::test_mode_legacy_ger_with_moddn | 0.17 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:1031 ######## Disable the moddn aci mod ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:1035 ######## mode legacy : GER with moddn ######## INFO lib389:acl_test.py:133 Bind as cn=Directory Manager INFO lib389:acl_test.py:139 Bind as uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1057 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1060 ######## entryLevelRights: b'vn' INFO lib389:acl_test.py:133 Bind as cn=Directory Manager | |||
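The entryLevelRights values above come from Get Effective Rights (GER) searches: b'v' means the bind user may only view the entry, while b'vn' adds the rename (moddn) right. A hedged sketch of issuing such a search with python-ldap's generic request control; the OID and the 'dn: <authzid>' value format follow the GER control, while the connection details are hypothetical:

import ldap
from ldap.controls import LDAPControl

GER_OID = '1.3.6.1.4.1.42.2.27.9.5.2'  # Get Effective Rights request control

conn = ldap.initialize('ldap://localhost:39001')
conn.simple_bind_s('cn=Directory Manager', 'password')  # hypothetical credentials

# Ask which rights bind_entry would have on every entry under cn=accounts.
ger = LDAPControl(GER_OID, criticality=True,
                  controlValue=b'dn: uid=bind_entry,dc=example,dc=com')
results = conn.search_ext_s('cn=accounts,dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(objectClass=*)', attrlist=['entryLevelRights'],
                            serverctrls=[ger])
for dn, attrs in results:
    print(dn, attrs.get('entryLevelRights'))  # b'v' = view; b'vn' = view + moddn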
Passed | suites/acl/acl_test.py::test_rdn_write_get_ger | 0.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389:acl_test.py:1071 ######## Add entry tuser ######## -------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:1097 ######## GER rights for anonymous ######## INFO lib389:acl_test.py:1107 dn: dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: ou=groups,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: ou=people,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: ou=services,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=demo_user,ou=people,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=demo_group,ou=groups,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=group_admin,ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=group_modify,ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=user_admin,ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=user_modify,ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=user_private_read,ou=permissions,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=replication_managers,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=bind_entry,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=excepts,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account0,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account1,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account2,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: 
uid=new_account3,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account4,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account5,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account6,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account7,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account8,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account9,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account10,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account11,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account12,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account13,cn=accounts,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account14,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account15,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account16,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account17,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account18,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: uid=new_account19,cn=staged user,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' INFO lib389:acl_test.py:1107 dn: cn=tuser,dc=example,dc=com INFO lib389:acl_test.py:1109 ######## entryLevelRights: b'v' | |||
Passed | suites/acl/acl_test.py::test_rdn_write_modrdn_anonymous | 3.86 | |
-------------------------------Captured log call-------------------------------- INFO lib389:acl_test.py:1136 dn: INFO lib389:acl_test.py:1138 ######## 'objectClass': [b'top'] INFO lib389:acl_test.py:1138 ######## 'defaultnamingcontext': [b'dc=example,dc=com'] INFO lib389:acl_test.py:1138 ######## 'dataversion': [b'020210605020108'] INFO lib389:acl_test.py:1138 ######## 'netscapemdsuffix': [b'cn=ldap://dc=localhost,dc=localdomain:39001'] INFO lib389:acl_test.py:1143 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:acl_test.py:1150 The entry was not renamed (expected) INFO lib389:acl_test.py:133 Bind as cn=Directory Manager | |||
Passed | suites/acl/default_aci_allows_self_write_test.py::test_acl_default_allow_self_write_nsuser | 19.98 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/default_aci_allows_self_write_test.py::test_acl_default_allow_self_write_user | 12.01 | |
------------------------------Captured stdout call------------------------------ uid cn sn uidNumber gidNumber homeDirectory | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_groupdn | 8.75 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/deladd_test.py::test_allow_add_access_to_anyone | 0.25 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_anyone | 0.22 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_not_to_userdn | 0.25 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_not_to_group | 0.25 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_add_access_to_parent | 0.29 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_parent | 0.72 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_dynamic_group | 0.33 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_to_dynamic_group_uid | 0.25 | |
No log output captured. | |||
Passed | suites/acl/deladd_test.py::test_allow_delete_access_not_to_dynamic_group | 1.38 | |
No log output captured. | |||
Passed | suites/acl/enhanced_aci_modrnd_test.py::test_enhanced_aci_modrnd | 10.17 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:32 Add a container: ou=test_ou_1,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:38 Add a container: ou=test_ou_2,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:44 Add a user: cn=test_user,ou=test_ou_1,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:58 Add an ACI 'allow (all)' by cn=test_user,ou=test_ou_1,dc=example,dc=com to the ou=test_ou_1,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:62 Add an ACI 'allow (all)' by cn=test_user,ou=test_ou_1,dc=example,dc=com to the ou=test_ou_2,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:93 Bind as cn=test_user,ou=test_ou_1,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:97 User MODRDN operation from ou=test_ou_1,dc=example,dc=com to ou=test_ou_2,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:103 Check there is no user in ou=test_ou_1,dc=example,dc=com INFO tests.suites.acl.enhanced_aci_modrnd_test:enhanced_aci_modrnd_test.py:109 Check there is our user in ou=test_ou_2,dc=example,dc=com | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_five | 9.21 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_six | 0.12 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_seven | 0.10 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eight | 0.36 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_nine | 0.11 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_ten | 0.11 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eleven | 0.11 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_twelve | 0.09 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_fourteen | 0.13 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_fifteen | 0.12 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_sixteen | 0.10 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_seventeen | 0.09 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_part2_test.py::test_undefined_in_group_eval_eighteen | 1.94 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_caching_changes | 9.73 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/globalgroup_test.py::test_deny_group_member_all_rights_to_user | 0.12 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deny_group_member_all_rights_to_group_members | 0.09 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_denial | 0.12 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_denial_two | 0.08 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_allow | 0.08 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_deeply_nested_groups_aci_allow_two | 0.10 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval | 0.27 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval_two | 0.09 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval_three | 0.10 | |
No log output captured. | |||
Passed | suites/acl/globalgroup_test.py::test_undefined_in_group_eval_four | 2.03 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_connectin_from_an_unauthorized_network | 0.20 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_ip_keyword_test_noip_cannot | 0.16 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_can_access_the_data_at_any_time | 0.15 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_can_access_the_data_only_in_the_morning | 0.16 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_can_access_the_data_only_in_the_afternoon | 0.15 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_timeofday_keyword | 1.19 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_dayofweek_keyword_test_everyday_can_access | 0.16 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_dayofweek_keyword_today_can_access | 0.15 | |
No log output captured. | |||
Passed | suites/acl/keywords_part2_test.py::test_user_cannot_access_the_data_at_all | 1.71 | |
No log output captured. | |||
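The keywords suites above exercise 389-ds bind-rule keywords (ip, dns, timeofday, dayofweek) that restrict when and from where an ACI applies. A hedged sketch of what such ACIs look like; the targets, values, and ACI names are illustrative, not copied from the keywords test modules:

# Grant access only from localhost (the ip bind-rule keyword).
ACI_IP = ('(targetattr = "*")(version 3.0; acl "ip rule"; '
          'allow (all) userdn = "ldap:///anyone" and ip = "127.0.0.1";)')

# Grant access only in the morning and only on working days
# (the timeofday and dayofweek bind-rule keywords).
ACI_TIME = ('(targetattr = "*")(version 3.0; acl "time rule"; '
            'allow (all) userdn = "ldap:///anyone" '
            'and timeofday >= "0800" and timeofday < "1200" '
            'and dayofweek = "Mon,Tue,Wed,Thu,Fri";)')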
Passed | suites/acl/keywords_test.py::test_user_binds_with_a_password_and_can_access_the_data | 8.89 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/keywords_test.py::test_user_binds_with_a_bad_password_and_cannot_access_the_data | 0.09 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_anonymous_user_cannot_access_the_data | 0.13 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_authenticated_but_has_no_rigth_on_the_data | 0.14 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_the_bind_client_is_accessing_the_directory | 0.08 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_users_binds_with_a_password_and_can_access_the_data | 0.07 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_binds_without_any_password_and_cannot_access_the_data | 0.09 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_cannot_access_the_data_if_not_from_a_certain_domain | 0.12 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_dnsalias_keyword_test_nodns_cannot | 0.54 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_from_ipv4_or_ipv6_address[127.0.0.1] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/keywords_test.py::test_user_can_access_from_ipv4_or_ipv6_address[[::1]] | 2.16 | |
No log output captured. | |||
Passed | suites/acl/misc_test.py::test_accept_aci_in_addition_to_acl | 9.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/misc_test.py::test_more_then_40_acl_will_crash_slapd | 0.39 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_search_access_should_not_include_read_access | 0.14 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_only_allow_some_targetattr | 0.17 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_only_allow_some_targetattr_two | 0.52 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_memberurl_needs_to_be_normalized | 0.21 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_greater_than_200_acls_can_be_created | 3.79 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_server_bahaves_properly_with_very_long_attribute_names | 0.16 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_do_bind_as_201_distinct_users | 24.82 | |
-------------------------------Captured log setup------------------------------- INFO lib389:misc_test.py:76 Exception (expected): ALREADY_EXISTS | |||
Passed | suites/acl/misc_test.py::test_info_disclosure | 0.99 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_targetattr_with_a_single_attribute | 8.47 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_targetattr_with_multiple_attibutes | 0.48 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdn_all | 0.25 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdn_with_wildcards_in_dn | 0.21 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdn_with_multiple_dns | 0.83 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_target_with_wildcards | 0.32 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_write_access_to_userdnattr | 0.25 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_selfwrite_access_to_anyone | 0.23 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_uniquemember_should_also_be_the_owner | 0.35 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_aci_with_both_allow_and_deny | 0.30 | |
No log output captured. | |||
Passed | suites/acl/modify_test.py::test_allow_owner_to_modify_entry | 0.93 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_allow_write_privilege_to_anyone | 8.21 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/modrdn_test.py::test_allow_write_privilege_to_dynamic_group_with_scope_set_to_base_in_ldap_url | 0.21 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_write_access_to_naming_atributes | 0.21 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_write_access_to_naming_atributes_two | 0.27 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_access_aci_list_contains_any_deny_rule | 0.29 | |
No log output captured. | |||
Passed | suites/acl/modrdn_test.py::test_renaming_target_entry | 1.42 | |
No log output captured. | |||
Passed | suites/acl/repeated_ldap_add_test.py::test_repeated_ldap_add | 40.25 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ Entry uid=buser123,ou=BOU,dc=example,dc=com is locked -------------------------------Captured log call-------------------------------- INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:184 Testing Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, etc. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:186 Disabling accesslog logbuffering INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:189 Bind as {cn=Directory Manager,password} INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:192 Adding ou=BOU a bind user belongs to. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:197 Adding a bind user. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:204 Adding a test user. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:211 Deleting aci in dc=example,dc=com. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:214 While binding as DM, acquire an access log path and instance dir INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:220 Bind case 1. the bind user has no rights to read the entry itself, bind should be successful. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:221 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} who has no access rights. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:229 Access log path: /var/log/dirsrv/slapd-standalone1/access INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:231 Bind case 2-1. the bind user does not exist, bind should fail with error INVALID_CREDENTIALS INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:233 Bind as {uid=bogus,dc=example,dc=com,bogus} who does not exist. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:237 Exception (expected): INVALID_CREDENTIALS INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:238 Desc Invalid credentials INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:246 Cause found - [04/Jun/2021:22:04:48.544314906 -0400] conn=1 op=11 RESULT err=49 tag=97 nentries=0 wtime=0.000146578 optime=0.003125814 etime=0.003271070 - No such entry INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:249 Bind case 2-2. the bind user's suffix does not exist, bind should fail with error INVALID_CREDENTIALS INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:251 Bind as {uid=bogus,ou=people,dc=bogus,bogus} who does not exist. INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:260 Cause found - [04/Jun/2021:22:04:49.551691200 -0400] conn=1 op=12 RESULT err=49 tag=97 nentries=0 wtime=0.000199566 optime=0.003006696 etime=0.003202083 - No suffix for bind dn found INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:263 Bind case 2-3. 
the bind user's password is wrong, bind should fail with error INVALID_CREDENTIALS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:265 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,bogus} who does not exist.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:269 Exception (expected): INVALID_CREDENTIALS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:270 Desc Invalid credentials
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:278 Cause found - [04/Jun/2021:22:04:50.586016956 -0400] conn=1 op=13 RESULT err=49 tag=97 nentries=0 wtime=0.000170936 optime=0.031213486 etime=0.031378778 - Invalid credentials
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:281 Adding aci for uid=buser123,ou=BOU,dc=example,dc=com to ou=BOU,dc=example,dc=com.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:283 aci: (targetattr="*")(version 3.0; acl "buser123"; allow(all) userdn = "ldap:///uid=buser123,ou=BOU,dc=example,dc=com";)
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:284 Bind as {cn=Directory Manager,password}
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:289 Bind case 3. the bind user has the right to read the entry itself, bind should be successful.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:290 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} which should be ok.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:293 The following operations are against the subtree the bind user uid=buser123,ou=BOU,dc=example,dc=com has no rights to.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:297 Search case 1. the bind user has no rights to read the search entry, it should return no search results with <class 'ldap.SUCCESS'>
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching existing entry uid=tuser0,ou=people,dc=example,dc=com, which should be ok.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:303 Search case 2-1. the search entry does not exist, the search should return no search results with SUCCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,dc=example,dc=com, which should be ok.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:309 Search case 2-2. the search entry does not exist, the search should return no search results with SUCCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should be ok.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:141 Search should return none
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:316 Add case 1. the bind user has no rights AND the adding entry exists, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:322 Add case 2-1. the bind user has no rights AND the adding entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:328 Add case 2-2. the bind user has no rights AND the adding entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:335 Modify case 1. the bind user has no rights AND the modifying entry exists, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:341 Modify case 2-1. the bind user has no rights AND the modifying entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:347 Modify case 2-2. the bind user has no rights AND the modifying entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:354 Modrdn case 1. the bind user has no rights AND the renaming entry exists, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:360 Modrdn case 2-1. the bind user has no rights AND the renaming entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:366 Modrdn case 2-2. the bind user has no rights AND the renaming entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:372 Modrdn case 3. the bind user has no rights AND the node moving an entry to exists, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to existing superior ou=groups,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:378 Modrdn case 4-1. the bind user has no rights AND the node moving an entry to does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:384 Modrdn case 4-2. the bind user has no rights AND the node moving an entry to does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:391 Delete case 1. the bind user has no rights AND the deleting entry exists, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:397 Delete case 2-1. the bind user has no rights AND the deleting entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:403 Delete case 2-2. the bind user has no rights AND the deleting entry does not exist, it should fail with INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with INSUFFICIENT_ACCESS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): INSUFFICIENT_ACCESS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Insufficient access
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:407 EXTRA: Check no regressions
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:408 Adding aci for uid=buser123,ou=BOU,dc=example,dc=com to dc=example,dc=com.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:410 Bind as {cn=Directory Manager,password}
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:415 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123}.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:425 Search case. the search entry does not exist, the search should fail with NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Searching non-existing entry uid=bogus,ou=people,dc=example,dc=com, which should fail with NO_SUCH_OBJECT.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:431 Add case. the adding entry already exists, it should fail with ALREADY_EXISTS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Adding existing entry uid=tuser0,ou=people,dc=example,dc=com, which should fail with ALREADY_EXISTS.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): ALREADY_EXISTS
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc Already exists
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:436 Modify case. the modifying entry does not exist, it should fail with NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Modifying non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:441 Modrdn case 1. the renaming entry does not exist, it should fail with NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Renaming non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:446 Modrdn case 2. the node moving an entry to does not exist, it should fail with NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Moving to non-existing superior ou=OU,dc=example,dc=com, which should fail with NO_SUCH_OBJECT.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:451 Delete case. the deleting entry does not exist, it should fail with NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:108 Deleting non-existing entry uid=bogus,dc=example,dc=com, which should fail with NO_SUCH_OBJECT.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:131 Exception (expected): NO_SUCH_OBJECT
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:132 Desc No such object
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:148 PASSED
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:454 Inactivate uid=buser123,ou=BOU,dc=example,dc=com
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:461 ['/usr/sbin/dsidm', 'standalone1', '-b', 'dc=example,dc=com', 'account', 'lock', 'uid=buser123,ou=BOU,dc=example,dc=com']
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:465 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} which should fail with UNWILLING_TO_PERFORM.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:469 Exception (expected): UNWILLING_TO_PERFORM
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:470 Desc Server is unwilling to perform
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:473 Bind as {uid=buser123,ou=BOU,dc=example,dc=com,bogus} which should fail with UNWILLING_TO_PERFORM.
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:477 Exception (expected): UNWILLING_TO_PERFORM
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:478 Desc Server is unwilling to perform
INFO tests.suites.acl.repeated_ldap_add_test:repeated_ldap_add_test.py:481 SUCCESS | |||
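The bind-versus-ACL behavior traced above is easy to reproduce outside the suite. Below is a minimal python-ldap sketch, not the suite's own code: the ldap://localhost:38901 URL matches the fixture's ldap-port, and the DNs and passwords are the ones shown in the log; everything else is an assumption.

    import ldap

    BIND_DN = 'uid=buser123,ou=BOU,dc=example,dc=com'
    conn = ldap.initialize('ldap://localhost:38901')  # assumed fixture port

    # A wrong password is refused at bind time: err=49, INVALID_CREDENTIALS.
    try:
        conn.simple_bind_s(BIND_DN, 'bogus')
    except ldap.INVALID_CREDENTIALS:
        pass  # expected, matches the bind cases above

    # With the allow(all) aci on ou=BOU in place, the correct password binds.
    conn.simple_bind_s(BIND_DN, 'buser123')

    # Writes outside the granted subtree are denied: err=50, INSUFFICIENT_ACCESS.
    try:
        conn.add_s('uid=bogus,ou=people,dc=example,dc=com',
                   [('objectClass', [b'top', b'person']),
                    ('cn', [b'bogus']),
                    ('sn', [b'bogus'])])
    except ldap.INSUFFICIENT_ACCESS:
        pass  # expected, matches the Add/Modify/Modrdn/Delete cases

Once dsidm sets nsAccountLock (the 'account lock' invocation logged above), even the correct password is refused with UNWILLING_TO_PERFORM, which is the final pair of bind checks.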
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, NESTED_ROLE_TESTER)] | 9.08 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, NESTED_ROLE_TESTER)] | 0.36 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(MARY_ROLE, NOT_RULE_ACCESS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, OR_RULE_ACCESS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, OR_RULE_ACCESS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(STEVE_ROLE, ALL_ACCESS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(HARRY_ROLE, ALL_ACCESS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_positive[(MARY_ROLE, ALL_ACCESS)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(MARY_ROLE, NESTED_ROLE_TESTER)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(STEVE_ROLE, NOT_RULE_ACCESS)] | 0.12 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(HARRY_ROLE, NOT_RULE_ACCESS)] | 0.12 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_seealso_negative[(MARY_ROLE , OR_RULE_ACCESS)] | 0.36 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_positive[NOT_RULE_ACCESS] | 0.18 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_positive[ALL_ACCESS] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_negaive[NESTED_ROLE_TESTER] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/roledn_test.py::test_mod_anonseealso_negaive[OR_RULE_ACCESS] | 1.57 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_non_leaf | 9.41 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_wildcard_non_leaf | 0.50 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with__target_set_on_wildcard_leaf | 0.49 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_equality_search | 0.40 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_equality_search_two | 0.58 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_substring_search | 0.29 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_substring_search_two | 1.65 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_boolean_or_of_two_equality_search | 0.22 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_to__userdn_two | 0.41 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_userdn | 0.53 | |
No log output captured. | |||
Passed | suites/acl/search_real_part2_test.py::test_deny_all_access_with_targetfilter_using_presence_search | 1.14 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url | 8.57 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url_two | 0.49 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_search_access_to_userdn_with_ldap_url_matching_all_users | 0.58 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_a_dynamic_group | 0.90 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_with_host_port_set_on_ldap_url | 0.37 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_with_scope_set_to_one_in_ldap_url | 0.44 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_read_access_to_dynamic_group_two | 0.53 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_deny_access_to_group_should_deny_access_to_all_uniquemember | 0.57 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_entry_with_lots_100_attributes | 8.21 | |
No log output captured. | |||
Passed | suites/acl/search_real_part3_test.py::test_groupdnattr_value_is_another_group | 2.03 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with_target_set | 8.68 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_to_a_target_with_wild_card | 0.34 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_without_a_target_set | 1.30 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_read_search_and_compare_access_with_target_and_targetattr_set | 1.11 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_read_access_to_multiple_groupdns | 0.93 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_to_userdnattr | 0.27 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with__target_set | 0.52 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with__targetattr_set | 1.14 | |
No log output captured. | |||
Passed | suites/acl/search_real_test.py::test_deny_all_access_with_targetattr_set | 3.64 | |
No log output captured. | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_add | 9.38 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389:selfdn_permissions_test.py:58 Add OCticket47653 that allows 'member' attribute
INFO lib389:selfdn_permissions_test.py:63 Add cn=bind_entry, dc=example,dc=com
-------------------------------Captured log call--------------------------------
INFO lib389:selfdn_permissions_test.py:106 ######################### ADD ######################
INFO lib389:selfdn_permissions_test.py:109 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:139 Try to add Add cn=test_entry, dc=example,dc=com (aci is missing):
dn: cn=test_entry, dc=example,dc=com
cn: test_entry
member: cn=bind_entry, dc=example,dc=com
objectclass: top
objectclass: person
objectclass: OCticket47653
postalAddress: here
postalCode: 1234
sn: test_entry
INFO lib389:selfdn_permissions_test.py:143 Exception (expected): INSUFFICIENT_ACCESS
INFO lib389:selfdn_permissions_test.py:147 Bind as cn=Directory Manager and add the ADD SELFDN aci
INFO lib389:selfdn_permissions_test.py:159 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:164 Try to add Add cn=test_entry, dc=example,dc=com (member is missing)
INFO lib389:selfdn_permissions_test.py:172 Exception (expected): INSUFFICIENT_ACCESS
INFO lib389:selfdn_permissions_test.py:178 Try to add Add cn=test_entry, dc=example,dc=com (with several member values)
INFO lib389:selfdn_permissions_test.py:181 Exception (expected): INSUFFICIENT_ACCESS
INFO lib389:selfdn_permissions_test.py:184 Try to add Add cn=test_entry, dc=example,dc=com should be successful | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_search | 0.50 | |
-------------------------------Captured log call--------------------------------
INFO lib389:selfdn_permissions_test.py:205 ######################### SEARCH ######################
INFO lib389:selfdn_permissions_test.py:207 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:211 Try to search cn=test_entry, dc=example,dc=com (aci is missing)
INFO lib389:selfdn_permissions_test.py:216 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci
INFO lib389:selfdn_permissions_test.py:229 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:233 Try to search cn=test_entry, dc=example,dc=com should be successful | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_modify | 0.41 | |
-------------------------------Captured log call--------------------------------
INFO lib389:selfdn_permissions_test.py:256 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:259 ######################### MODIFY ######################
INFO lib389:selfdn_permissions_test.py:263 Try to modify cn=test_entry, dc=example,dc=com (aci is missing)
INFO lib389:selfdn_permissions_test.py:267 Exception (expected): INSUFFICIENT_ACCESS
INFO lib389:selfdn_permissions_test.py:271 Bind as cn=Directory Manager and add the WRITE SELFDN aci
INFO lib389:selfdn_permissions_test.py:284 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:288 Try to modify cn=test_entry, dc=example,dc=com. It should succeed | |||
Passed | suites/acl/selfdn_permissions_test.py::test_selfdn_permission_delete | 2.44 | |
-------------------------------Captured log call--------------------------------
INFO lib389:selfdn_permissions_test.py:314 ######################### DELETE ######################
INFO lib389:selfdn_permissions_test.py:317 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:322 Try to delete cn=test_entry, dc=example,dc=com (aci is missing)
INFO lib389:selfdn_permissions_test.py:325 Exception (expected): INSUFFICIENT_ACCESS
INFO lib389:selfdn_permissions_test.py:329 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci
INFO lib389:selfdn_permissions_test.py:341 Bind as cn=bind_entry, dc=example,dc=com
INFO lib389:selfdn_permissions_test.py:345 Try to delete cn=test_entry, dc=example,dc=com should be successful | |||
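All four selfdn_permissions cases hinge on the SELFDN bind rule: an operation is permitted only when the bound DN is a value of the target entry's own member attribute (hence the denials above when member is missing or multi-valued). A hedged lib389 sketch of such an aci; the string follows the documented userattr "#selfDN" form and is illustrative rather than a copy of the suite's aci, and `standalone` stands for the fixture instance.

    from lib389.idm.domain import Domain

    # Illustrative SELFDN aci: allow add only when the bound DN appears in
    # the new entry's 'member' attribute (assumed, documented userattr form).
    ADD_SELFDN_ACI = ('(target="ldap:///cn=*,dc=example,dc=com")(targetattr="*")'
                      '(version 3.0; acl "SelfDN add"; allow (add) '
                      'userattr = "member#selfDN";)')

    suffix = Domain(standalone, 'dc=example,dc=com')
    suffix.add('aci', ADD_SELFDN_ACI)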
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_1] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_2] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_3] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_4] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_5] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_6] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_7] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_8] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_9] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_10] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_11] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_12] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_13] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_14] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_15] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_16] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_17] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_19] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_21] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_22] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_targattrfilters_23] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_acl_mispel] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_acl_string] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Wrong_version_string] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Missing_version_string] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Authenticate_statement] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Multiple_targets] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Target_set_to_self] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_ldap_instead_of_ldap] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_more_than_three] | 0.24 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_target_set_with_less_than_three] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_bind_rule_set_with_less_than_three] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_semicolon_instead_of_comma_in_permission] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_in_the_target] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_use_double_equal_instead_of_equal_in_user_and_group_access] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_donot_cote_the_name_of_the_aci] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_1] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_2] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_extra_parentheses_case_3] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_no_semicolon_at_the_end_of_the_aci] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_a_character_different_of_a_semicolon_at_the_end_of_the_aci] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_bad_filter] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_in_the_targattrfilters] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_aci_invalid_syntax[test_Use_double_equal_instead_of_equal_inside_the_targattrfilters] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/syntax_test.py::test_target_set_above_the_entry_test | 1.74 | |
No log output captured. | |||
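Each test_aci_invalid_syntax parameter above feeds one malformed aci to the server and expects the write to be rejected rather than silently accepted. A minimal sketch of that pattern, assuming lib389's topology_st fixture; the broken aci below (unclosed targetattr clause) is a made-up example, not one of the suite's parameters.

    import ldap
    import pytest
    from lib389.idm.domain import Domain

    # Deliberately malformed: the targetattr clause is never closed.
    BAD_ACI = ('(targetattr="*" (version 3.0; acl "broken"; '
               'allow(all) userdn = "ldap:///anyone";)')

    def test_reject_bad_aci(topology_st):
        suffix = Domain(topology_st.standalone, 'dc=example,dc=com')
        # 389-ds validates aci values on write and returns INVALID_SYNTAX.
        with pytest.raises(ldap.INVALID_SYNTAX):
            suffix.add('aci', BAD_ACI)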
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,ROLEDNACCESS)] | 9.89 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,USERDNACCESS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,GROUPDNACCESS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,LDAPURLACCESS)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(CAN,ATTRNAMEACCESS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_0, OU_2)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_1,ANCESTORS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_2,GRANDPARENTS)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,OU_2)] | 0.12 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4, ANCESTORS)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,GRANDPARENTS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,PARENTS)] | 0.07 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_positive[(LEVEL_4,CHILDREN)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,ROLEDNACCESS)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,USERDNACCESS)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,GROUPDNACCESS)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,LDAPURLACCESS)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(CANNOT,ATTRNAMEACCESS)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0, ANCESTORS)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,GRANDPARENTS)] | 0.08 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,PARENTS)] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_0,CHILDREN)] | 0.10 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_2,PARENTS)] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_mod_see_also_negative[(LEVEL_4,GRANDSONS)] | 0.41 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=USERDNACCESS,ou=Accounting,dc=example,dc=com] | 0.11 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=ROLEDNACCESS,ou=Accounting,dc=example,dc=com] | 0.09 | |
No log output captured. | |||
Passed | suites/acl/userattr_test.py::test_last_three[uid=Ananda Borah,ou=Accounting,dc=example,dc=com-uid=GROUPDNACCESS,ou=Accounting,dc=example,dc=com] | 2.29 | |
No log output captured. | |||
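The (LEVEL_n, ...) parameters above exercise userattr inheritance: with parent[n] the server evaluates the named attribute not on the target entry itself but on its n-th ancestor (level 0 being the entry). Two hedged examples of the documented forms; these strings are illustrative reconstructions, not the suite's acis.

    # Grant write on seeAlso when the bound DN is the value of 'owner'
    # on the entry itself (assumed attribute name, illustrative only).
    DIRECT_ACI = ('(targetattr="seeAlso")(version 3.0; acl "self"; '
                  'allow (write) userattr = "owner#USERDN";)')

    # Same check, but satisfied if 'owner' matches on the entry, its
    # parent, or its grandparent.
    INHERITED_ACI = ('(targetattr="seeAlso")(version 3.0; acl "levels"; '
                     'allow (write) userattr = "parent[0,1,2].owner#USERDN";)')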
Passed | suites/acl/valueacl_part2_test.py::test_we_can_search_as_expected | 8.13 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/valueacl_part2_test.py::test_we_can_mod_title_as_expected | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_modify_with_multiple_filters | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_denied_by_multiple_filters | 0.36 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_allowed_add_one_attribute | 0.72 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_cannot_add_an_entry_with_attribute_values_we_are_not_allowed_add | 0.46 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_on_modrdn | 0.37 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_on_modrdn_allow | 0.44 | |
No log output captured. | |||
Passed | suites/acl/valueacl_part2_test.py::test_targattrfilters_keyword | 1.50 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_not_allowed_to_delete | 8.29 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/acl/valueacl_test.py::test_donot_allow_write_access_to_title_if_value_is_not_architect | 0.60 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_allowed_to_delete | 0.33 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_delete_an_attribute_value_we_are_not_allowed_to_deleted | 0.58 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_modify_replace | 0.37 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_modify_delete | 0.36 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_replace_an_attribute_if_we_lack | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_have_del_rights_to_all_attr_value | 0.32 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_donot_have_del_rights_to_all_attr_value | 0.34 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_remove_an_attribute_if_we_have_del_rights_to_all_attr_values | 0.58 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_cantnot_delete_an_entry_with_attribute_values_we_are_not_allowed_delete | 0.45 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_we_can_add_and_delete_an_entry_with_attribute_values_we_are_allowed_add_and_delete | 0.33 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_title | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_allow_to_modify | 0.36 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_selfwrite_does_not_confer_write_on_a_targattrfilters_atribute | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_selfwrite_continues_to_give_rights_to_attr_in_targetattr_list | 0.37 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_add_an_attribute_value_we_are_allowed_to_add_with_ldapanyone | 0.33 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_hierarchy | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_targattrfilters_and_search_permissions_and_that_ldapmodify_works_as_expected | 0.35 | |
No log output captured. | |||
Passed | suites/acl/valueacl_test.py::test_targattrfilters_and_search_permissions_and_that_ldapmodify_works_as_expected_two | 2.29 | |
No log output captured. | |||
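Both valueacl suites revolve around targattrfilters, which scopes a permission to particular attribute values rather than to a whole attribute. A hedged example of the documented form, illustrative rather than one of the suite's acis: the rule below lets any authenticated user add or delete a title value only when that value is "architect".

    # Value-scoped permission: write access to 'title' exists only for the
    # value matching the filter (illustrative DN/filter, assumed syntax).
    TAF_ACI = ('(targattrfilters="add=title:(title=architect), '
               'del=title:(title=architect)")'
               '(version 3.0; acl "value-scoped title"; allow (write) '
               'userdn = "ldap:///anyone";)')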
Passed | suites/attr_encryption/attr_encryption_test.py::test_basic | 22.13 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:35 Enable TLS for attribute encryption
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:38 Enables attribute encryption
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:42 Enables attribute encryption for employeeNumber and telephoneNumber
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:46 Add a test user with encrypted attributes
-------------------------------Captured log call--------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:81 Restart the server
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:87 Extracting values of cn from the list of objects in encrypt_attrs
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:88 And appending the cn values in a list
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:93 Check employeenumber encryption is enabled
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:96 Check telephoneNumber encryption is enabled
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:99 Check that encrypted attribute is present for user i.e. telephoneNumber | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_export_import_ciphertext | 13.35 | |
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_ciphertext.ldif
-------------------------------Captured log call--------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:127 Export data as ciphertext
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:138 Check that the attribute is present in the exported file
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:139 Check that the encrypted value of attribute is not present in the exported file
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:145 Delete the test user entry with encrypted data
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:148 Import data as ciphertext, which was exported previously
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:159 Check that the data with encrypted attribute is imported properly | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_export_import_plaintext | 14.74 | |
------------------------------Captured stderr call------------------------------
[04/Jun/2021:22:08:22.279185024 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
[04/Jun/2021:22:08:22.285411541 -0400] - INFO - slapd_extract_cert - CA CERT NAME: Self-Signed-CA
[04/Jun/2021:22:08:22.287933628 -0400] - ERR - slapd_extract_cert - Unable to open "/tmp/slapd-standalone1/Self-Signed-CA.pem" for writing (-5950, 2).
[04/Jun/2021:22:08:22.290220340 -0400] - WARN - Security Initialization - SSL alert: Sending pin request to SVRCore. You may need to run systemd-tty-ask-password-agent to provide the password if pin.txt does not exist.
[04/Jun/2021:22:08:22.493424713 -0400] - INFO - slapd_extract_cert - SERVER CERT NAME: Server-Cert
[04/Jun/2021:22:08:22.496524966 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
[04/Jun/2021:22:08:22.499398562 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_plaintext.ldif
[04/Jun/2021:22:08:29.168655040 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
[04/Jun/2021:22:08:29.171921420 -0400] - INFO - slapd_extract_cert - CA CERT NAME: Self-Signed-CA
[04/Jun/2021:22:08:29.173999188 -0400] - ERR - slapd_extract_cert - Unable to open "/tmp/slapd-standalone1/Self-Signed-CA.pem" for writing (-5950, 2).
[04/Jun/2021:22:08:29.176014149 -0400] - WARN - Security Initialization - SSL alert: Sending pin request to SVRCore. You may need to run systemd-tty-ask-password-agent to provide the password if pin.txt does not exist.
[04/Jun/2021:22:08:29.369836152 -0400] - INFO - slapd_extract_cert - SERVER CERT NAME: Server-Cert
[04/Jun/2021:22:08:29.424196563 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
[04/Jun/2021:22:08:29.427953690 -0400] - WARN - Security Initialization - /tmp is not a private namespace. pem files not exported there
-------------------------------Captured log call--------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:189 Export data as plain text
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:200 Check that the attribute is present in the exported file
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:201 Check that the plain text value of the encrypted attribute is present in the exported file
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:205 Delete the test user entry with encrypted data
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:208 Import data as plain text, which was exported previously
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:219 Check that the attribute is imported properly | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_unindexed | 5.80 | |
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/emp_num_ciphertext.ldif
-------------------------------Captured log call--------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:242 Export data as cipher text
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:253 Check that the attribute is present in the exported file
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:254 Check that the encrypted value of attribute is not present in the exported file | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_multiple_backends | 9.21 | |
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db1.ldif
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db2.ldif
-------------------------------Captured log call--------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:287 Add two test backends
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:307 Enables attribute encryption for telephoneNumber in test_backend1
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:312 Enables attribute encryption for employeeNumber in test_backend2
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:317 Add a test user with encrypted attributes in both backends
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:326 Export data as ciphertext from both backends
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:343 Check that the attribute is present in the exported file in db1
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:344 Check that the encrypted value of attribute is not present in the exported file in db1
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:350 Check that the attribute is present in the exported file in db2
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:351 Check that the encrypted value of attribute is not present in the exported file in db2
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:357 Delete test backends | |||
Passed | suites/attr_encryption/attr_encryption_test.py::test_attr_encryption_backends | 10.00 | |
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db1.ldif
ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/export_db2.ldif
-------------------------------Captured log call--------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:386 Add two test backends
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:405 Enables attribute encryption for telephoneNumber in test_backend1
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:410 Add a test user with telephoneNumber in both backends
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:419 Export data as ciphertext from both backends
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:436 Check that the attribute is present in the exported file in db1
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:437 Check that the encrypted value of attribute is not present in the exported file in db1
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:443 Check that the attribute is present in the exported file in db2
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:444 Check that the value of attribute is also present in the exported file in db2
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:450 Delete test backends
-----------------------------Captured log teardown------------------------------
INFO tests.suites.attr_encryption.attr_encryption_test:attr_encryption_test.py:53 Remove attribute encryption for various attributes | |||
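Attribute encryption as exercised above is switched on per backend by an nsAttributeEncryption entry under cn=config. A minimal python-ldap sketch for the userRoot backend; the port and the Directory Manager password are the fixture values shown in the logs and are assumptions here.

    import ldap

    conn = ldap.initialize('ldap://localhost:38901')  # assumed fixture port
    conn.simple_bind_s('cn=Directory Manager', 'password')  # assumed credentials

    # One child entry per encrypted attribute; AES matches what the suite
    # enables for employeeNumber and telephoneNumber.
    dn = ('cn=telephoneNumber,cn=encrypted attributes,cn=userRoot,'
          'cn=ldbm database,cn=plugins,cn=config')
    conn.add_s(dn, [
        ('objectClass', [b'top', b'nsAttributeEncryption']),
        ('cn', [b'telephoneNumber']),
        ('nsEncryptionAlgorithm', [b'AES']),
    ])
    # The server must be restarted before the setting takes effect; values
    # written earlier stay in clear until they are re-imported, which is why
    # the export/import round-trips above matter.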
Passed | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_config | 8.15 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_nsuser | 10.19 | |
No log output captured. | |||
Passed | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_disabled | 9.27 | |
No log output captured. | |||
Passed | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_directory_manager | 8.24 | |
No log output captured. | |||
Passed | suites/auth_token/basic_auth_test.py::test_ldap_auth_token_anonymous | 9.65 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_mod_test.py::test_mods | 23.82 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO tests.suites.automember_plugin.automember_mod_test:automember_mod_test.py:135 Test PASSED | |||
Passed | suites/automember_plugin/automember_test.py::test_automemberscope | 12.43 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/automember_plugin/automember_test.py::test_automemberfilter | 0.07 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_adduser | 0.28 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_delete_default_group | 4.29 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_no_default_group | 4.71 | |
No log output captured. | |||
Passed | suites/automember_plugin/automember_test.py::test_delete_target_group | 6.76 | |
No log output captured. | |||
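The scope/filter/default-group cases above are all driven by definition entries under the Auto Membership Plugin. A hedged lib389 sketch of a minimal definition; the property values are illustrative, and `standalone` stands for the fixture instance.

    from lib389.plugins import AutoMembershipPlugin, AutoMembershipDefinitions

    AutoMembershipPlugin(standalone).enable()
    AutoMembershipDefinitions(standalone).create(properties={
        'cn': 'testuser_definition',
        # Watch entries under this subtree...
        'autoMemberScope': 'ou=People,dc=example,dc=com',
        # ...that match this filter...
        'autoMemberFilter': 'objectclass=posixAccount',
        # ...and add them to this group via this membership attribute.
        'autoMemberDefaultGroup': 'cn=testgroup,ou=Groups,dc=example,dc=com',
        'autoMemberGroupingAttr': 'member:dn',
    })
    standalone.restart()  # plugin configuration takes effect after a restart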
Passed | suites/automember_plugin/basic_test.py::test_disable_the_plug_in | 15.00 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:142 Creating replication topology. | |||
Passed | suites/automember_plugin/basic_test.py::test_custom_config_area | 0.32 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_ability_to_control_behavior_of_modifiers_name | 9.00 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_posixaccount_objectclass_automemberdefaultgroup | 0.37 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_duplicated_member_attributes_added_when_the_entry_is_re_created | 0.63 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_multi_valued_automemberdefaultgroup_for_hostgroups | 0.11 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_plugin_creates_member_attributes_of_the_automemberdefaultgroup | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_multi_valued_automemberdefaultgroup_with_uniquemember | 8.03 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_invalid_automembergroupingattr_member | 0.79 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_valid_and_invalid_automembergroupingattr | 0.18 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_add_regular_expressions_for_user_groups_and_check_for_member_attribute_after_adding_users | 0.18 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_22-5288-5289-Contractor-5291-5292-Contractors] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_21-1161-1162-Contractor-1162-1163-Contractors] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_20-1188-1189-CEO-1191-1192-Contractors] | 0.13 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_15-9288-9289-Manager-9291-9292-Managers] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_14-561-562-Manager-562-563-Managers] | 0.11 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_matching_gid_role_inclusive_regular_expression[autoMembers_13-9788-9789-VPEngg-9392-9393-Managers] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_26-5788-5789-Intern-Contractors-SuffDef1-5] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_25-9788-9789-Employee-Contractors-Managers-1] | 0.10 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_24-1110-1111-Employee-Contractors-SuffDef1-5] | 0.13 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_23-2788-2789-Contractor-Contractors-SuffDef1-5] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_19-5788-5789-HRManager-Managers-SuffDef1-5] | 0.15 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_18-6788-6789-Junior-Managers-SuffDef1-5] | 0.14 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_17-562-563-Junior-Managers-SuffDef1-5] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_gid_and_role_inclusive_exclusive_regular_expression[autoMembers_16-6788-6789-Manager-Managers-SuffDef1-5] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_32-555-720-Employee-SubDef1-SubDef3] | 0.13 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_31-515-200-Junior-SubDef1-SubDef5] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_30-999-400-Supervisor-SubDef1-SubDef2] | 0.13 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_contractors_exclusive_regex_rules_member_uid[autoMembers_28-555-3663-ContractHR-Contractors,cn=subsuffGroups-Managers,cn=subsuffGroups] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_27-595-690-ContractHR-Managers-Contractors] | 0.13 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_29-8195-2753-Employee-Contractors-Managers] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_33-545-3333-Supervisor-Contractors-Managers] | 0.22 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_managers_inclusive_regex_rule[autoMembers_34-8195-693-Temporary-Managers-Contractors] | 0.12 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_reject_invalid_config_and_we_donot_deadlock_the_server | 7.93 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_re_build_task | 16.32 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_export_task | 10.97 | |
-------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:1090 Automember Export Updates task (task-06042021_221148) completed successfully | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_mapping | 2.26 | |
-------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:1141 Automember Map Updates task (task-06042021_221151) completed successfully | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_re_build | 9.52 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_export | 11.60 | |
-------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:1090 Automember Export Updates task (task-06042021_221210) completed successfully | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_run_re_build | 21.33 | |
No log output captured. | |||
Passed | suites/automember_plugin/basic_test.py::test_automemtask_run_export | 15.86 | |
-------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:1090 Automember Export Updates task (task-06042021_221248) completed successfully | |||
Passed | suites/automember_plugin/configuration_test.py::test_configuration | 14.85 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/backups/backup_test.py::test_missing_backend | 13.65 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/backups/backup_test.py::test_db_home_dir_online_backup | 5.69 | |
-------------------------------Captured log call-------------------------------- ERROR lib389:tasks.py:649 Error: backup task backup_06042021_221321 exited with -1 | |||
Passed | suites/betxns/betxn_test.py::test_betxt_7bit | 12.38 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.betxns.betxn_test:betxn_test.py:52 Running test_betxt_7bit... INFO tests.suites.betxns.betxn_test:betxn_test.py:78 test_betxt_7bit: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_betxn_attr_uniqueness | 4.83 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.betxns.betxn_test:betxn_test.py:133 test_betxn_attr_uniqueness: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_betxn_memberof | 4.54 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.betxns.betxn_test:betxn_test.py:179 test_betxn_memberof: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_betxn_modrdn_memberof_cache_corruption | 4.45 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.betxns.betxn_test:betxn_test.py:233 test_betxn_modrdn_memberof: PASSED | |||
Passed | suites/betxns/betxn_test.py::test_ri_and_mep_cache_corruption | 2.38 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.betxns.betxn_test:betxn_test.py:357 Test PASSED | |||
Passed | suites/chaining_plugin/paged_search_test.py::test_chaining_paged_search | 21.97 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO Accounts:_mapped_object.py:1071 Getting page 0 INFO Accounts:_mapped_object.py:1071 Getting page 1 INFO Accounts:_mapped_object.py:1071 Getting page 2 INFO Accounts:_mapped_object.py:1071 Getting page 3 | |||
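The "Getting page 0..3" lines above are a simple paged results search (RFC 2696) running through the chaining backend. A minimal client-side sketch of the same pattern with python-ldap is shown below; the URI, credentials, and filter are hypothetical, and the loop ends when the server returns an empty paging cookie.

import ldap
from ldap.controls import SimplePagedResultsControl

conn = ldap.initialize('ldap://localhost:38901')  # hypothetical URI and port
conn.simple_bind_s('cn=Directory Manager', 'password')  # hypothetical credentials
page_ctrl = SimplePagedResultsControl(True, size=100, cookie='')
while True:
    msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(uid=*)', serverctrls=[page_ctrl])
    rtype, rdata, rmsgid, serverctrls = conn.result3(msgid)
    # rdata is the list of (dn, attrs) tuples for this page; process it here.
    resp = [c for c in serverctrls
            if c.controlType == SimplePagedResultsControl.controlType]
    if not resp or not resp[0].cookie:
        break  # an empty cookie means the final page was delivered
    page_ctrl.cookie = resp[0].cookie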
Passed | suites/clu/clu_test.py::test_clu_pwdhash | 7.80 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.clu_test:clu_test.py:40 Running test_clu_pwdhash... INFO tests.suites.clu.clu_test:clu_test.py:54 pwdhash generated: {SSHA}5cJSwehYzQBimcMGi+gdiqYG6xyX0sxP1FOfkA== INFO tests.suites.clu.clu_test:clu_test.py:55 test_clu_pwdhash: PASSED | |||
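The {SSHA} value logged above is base64(SHA-1(password || salt) || salt): a 20-byte SHA-1 digest followed by the salt that was appended at hash time. A standard-library sketch for verifying a hash of that form (the password below is a placeholder, not the one the test used):

import base64
import hashlib

def check_ssha(password: str, hashed: str) -> bool:
    raw = base64.b64decode(hashed[len('{SSHA}'):])  # drop the scheme prefix
    digest, salt = raw[:20], raw[20:]  # SHA-1 digests are always 20 bytes
    return hashlib.sha1(password.encode() + salt).digest() == digest

check_ssha('secret', '{SSHA}5cJSwehYzQBimcMGi+gdiqYG6xyX0sxP1FOfkA==')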
Passed | suites/clu/clu_test.py::test_clu_pwdhash_mod | 2.31 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.clu_test:clu_test.py:78 Running test_clu_pwdhash_mod... INFO tests.suites.clu.clu_test:clu_test.py:87 pwdhash generated: {SSHA256}wZXmSgUsEoYoXzb2VBM2wSvIBn5YbEDRr9qfT3KA64U3u/09BONGIA== INFO tests.suites.clu.clu_test:clu_test.py:88 returned the hashed string using the algorithm set in nsslapd-rootpwstoragescheme | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_users | 14.19 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:119 Run ldifgen to create users ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=people,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - number=1000 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - rdn-cn=False INFO tests.suites.clu.dbgen_test:dbgen.py:61 - generic=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - start-idx=50 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - localize=False INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:196 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:122 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:127 Get number of accounts before import INFO tests.suites.clu.dbgen_test:dbgen_test.py:48 Stopping the server and running offline import... INFO tests.suites.clu.dbgen_test:dbgen_test.py:133 Check that accounts are imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
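The options logged above come from dsctl's ldifgen helper. Reproducing the same generation outside the test could look roughly like the sketch below; the flag spellings are inferred from the logged option names, so verify them against dsctl <instance> ldifgen users --help before relying on them.

import subprocess

subprocess.run([
    'dsctl', 'standalone1', 'ldifgen', 'users',
    '--suffix', 'dc=example,dc=com',
    '--parent', 'ou=people,dc=example,dc=com',
    '--number', '1000',
    '--generic',          # generic=True in the log above
    '--start-idx', '50',  # start-idx=50 in the log above
    '--ldif-file', '/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif',
], check=True)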
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_groups | 32.43 | |
------------------------------Captured stderr call------------------------------ ldap_add: Already exists (68) ldap_add: Already exists (68) ldap_add: Already exists (68) -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:183 Run ldifgen to create group ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=myGroup INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=groups,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - number=1 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - num-members=1000 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-members=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - member-attr=uniquemember INFO tests.suites.clu.dbgen_test:dbgen.py:61 - member-parent=ou=people,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:250 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:186 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:191 Get number of accounts before import INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:200 Check that accounts are imported INFO tests.suites.clu.dbgen_test:dbgen_test.py:203 Check that group is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_classic | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:256 Run ldifgen to create COS definition ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - type=classic INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos definitions,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-specifier=businessCategory INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-template=cn=sales,cn=classicCoS,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:259 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:267 Check that COS definition is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_pointer | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:322 Run ldifgen to create COS definition ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - type=pointer INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def_pointer INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos pointer definitions,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-template=cn=sales,cn=pointerCoS,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:325 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:333 Check that COS definition is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_indirect | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:387 Run ldifgen to create COS definition ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - type=indirect INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Postal_Def_indirect INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos indirect definitions,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-specifier=businessCategory INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr=['postalcode', 'telephonenumber'] INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:304 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:390 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:398 Check that COS definition is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_cos_template | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:449 Run ldifgen to create COS template ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Template INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=cos templates,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-priority=1 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - cos-attr-val=postalcode:12345 INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:341 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:452 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:460 Check that COS template is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_managed_role | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:511 Run ldifgen to create managed role ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Managed_Role INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=managed roles,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - type=managed INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:514 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:522 Check that managed role is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_filtered_role | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:571 Run ldifgen to create filtered role ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Filtered_Role INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=filtered roles,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - type=filtered INFO tests.suites.clu.dbgen_test:dbgen.py:61 - filter="objectclass=posixAccount" INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:574 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:582 Check that filtered role is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_nested_role | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:632 Run ldifgen to create nested role ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - NAME=My_Nested_Role INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=ou=nested roles,dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - type=nested INFO tests.suites.clu.dbgen_test:dbgen.py:61 - role-dn=['cn=some_role,ou=roles,dc=example,dc=com'] INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:391 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:635 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:643 Check that nested role is imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_mod_ldif_mixed | 34.81 | |
------------------------------Captured stderr call------------------------------ ldap_modify: Operation not allowed on RDN (67) ldap_rename: Invalid DN syntax (34) additional info: invalid RDN [the ldap_rename message repeats roughly 100 more times, once per modrdn operation in the generated LDIF] ldapmodify: extra lines at end (line 43453, entry "uid=user0999,dc=example,dc=com") -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:702 Run ldifgen to create modification ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - parent=dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-users=True
INFO tests.suites.clu.dbgen_test:dbgen.py:61 - delete-users=True INFO tests.suites.clu.dbgen_test:dbgen.py:61 - create-parent=False INFO tests.suites.clu.dbgen_test:dbgen.py:61 - num-users=1000 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - add-users=100 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - del-users=999 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - modrdn-users=100 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - mod-users=10 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - mod-attrs=['cn', 'uid', 'sn'] INFO tests.suites.clu.dbgen_test:dbgen.py:61 - randomize=False INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:467 Successfully created LDIF file: /var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen_test.py:705 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:710 Get number of accounts before import INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:719 Check that some accounts are imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbgen_test.py::test_dsconf_dbgen_nested_ldif | 27.40 | |
------------------------------Captured stderr call------------------------------ ldap_add: Already exists (68) -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbgen_test:dbgen_test.py:759 Run ldifgen to create nested ldif INFO tests.suites.clu.dbgen_test:dbgen.py:56 Generating LDIF with the following options: INFO tests.suites.clu.dbgen_test:dbgen.py:61 - suffix=dc=example,dc=com INFO tests.suites.clu.dbgen_test:dbgen.py:61 - node-limit=100 INFO tests.suites.clu.dbgen_test:dbgen.py:61 - num-users=600 INFO tests.suites.clu.dbgen_test:dbgen.py:62 - ldif-file=/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif INFO tests.suites.clu.dbgen_test:dbgen.py:63 Writing LDIF ... INFO tests.suites.clu.dbgen_test:dbgen.py:500 Successfully created nested LDIF file (/var/lib/dirsrv/slapd-standalone1/ldif/created.ldif) containing 6 nodes/subtrees INFO tests.suites.clu.dbgen_test:dbgen_test.py:762 Check if file exists INFO tests.suites.clu.dbgen_test:dbgen_test.py:67 Check if content is present in output INFO tests.suites.clu.dbgen_test:dbgen_test.py:71 Reset log file for next test INFO tests.suites.clu.dbgen_test:dbgen_test.py:767 Get number of accounts before import INFO tests.suites.clu.dbgen_test:dbgen_test.py:57 Add entries from ldif file with ldapmodify INFO tests.suites.clu.dbgen_test:dbgen_test.py:779 Check that accounts are imported -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbgen_test:dbgen_test.py:40 Delete files | |||
Passed | suites/clu/dbmon_test.py::test_dsconf_dbmon | 10.25 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbmon_test:dbmon_test.py:164 Sanity check for syntax INFO LogCapture:monitor.py:247 DB Monitor Report: 2021-06-04 22:16:23 INFO LogCapture:monitor.py:248 -------------------------------------------------------- INFO LogCapture:monitor.py:249 Database Cache: INFO LogCapture:monitor.py:250 - Cache Hit Ratio: 100% INFO LogCapture:monitor.py:251 - Free Space: 486.73 MB INFO LogCapture:monitor.py:252 - Free Percentage: 100.0% INFO LogCapture:monitor.py:253 - RO Page Drops: 0 INFO LogCapture:monitor.py:254 - Pages In: 0 INFO LogCapture:monitor.py:255 - Pages Out: 0 INFO LogCapture:monitor.py:256 INFO LogCapture:monitor.py:257 Normalized DN Cache: INFO LogCapture:monitor.py:258 - Cache Hit Ratio: 0% INFO LogCapture:monitor.py:259 - Free Space: 19.98 MB INFO LogCapture:monitor.py:260 - Free Percentage: 99.9% INFO LogCapture:monitor.py:261 - DN Count: 119 INFO LogCapture:monitor.py:262 - Evictions: 0 INFO LogCapture:monitor.py:263 INFO LogCapture:monitor.py:264 Backends: INFO LogCapture:monitor.py:266 - dc=example,dc=com (userRoot): INFO LogCapture:monitor.py:267 - Entry Cache Hit Ratio: 40% INFO LogCapture:monitor.py:268 - Entry Cache Count: 5 INFO LogCapture:monitor.py:269 - Entry Cache Free Space: 1.31 GB INFO LogCapture:monitor.py:270 - Entry Cache Free Percentage: 100.0% INFO LogCapture:monitor.py:271 - Entry Cache Average Size: 3.65 KB INFO LogCapture:monitor.py:272 - DN Cache Hit Ratio: 0% INFO LogCapture:monitor.py:273 - DN Cache Count: 5 INFO LogCapture:monitor.py:274 - DN Cache Free Space: 192.0 MB INFO LogCapture:monitor.py:275 - DN Cache Free Percentage: 100.0% INFO LogCapture:monitor.py:276 - DN Cache Average Size: 67.0 B INFO LogCapture:monitor.py:286 INFO tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log INFO tests.suites.clu.dbmon_test:dbmon_test.py:171 Sanity check for --indexes output INFO LogCapture:monitor.py:247 DB Monitor Report: 2021-06-04 22:16:23 [Database Cache, Normalized DN Cache, and userRoot backend figures identical to the report above] INFO LogCapture:monitor.py:278 - Indexes: INFO LogCapture:monitor.py:280 - Index: parentid.db INFO LogCapture:monitor.py:281 - Cache Hit: 0 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: uid.db INFO LogCapture:monitor.py:281 - Cache Hit: 0 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: id2entry.db INFO LogCapture:monitor.py:281 - Cache Hit: 8 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: objectclass.db INFO LogCapture:monitor.py:281 - Cache Hit: 14 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: nsuniqueid.db INFO LogCapture:monitor.py:281 - Cache Hit: 0 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: cn.db INFO LogCapture:monitor.py:281 - Cache Hit: 0 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: ancestorid.db INFO LogCapture:monitor.py:281 - Cache Hit: 0 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: numsubordinates.db INFO LogCapture:monitor.py:281 - Cache Hit: 0 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: aci.db INFO LogCapture:monitor.py:281 - Cache Hit: 3 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:280 - Index: entryrdn.db INFO LogCapture:monitor.py:281 - Cache Hit: 16 INFO LogCapture:monitor.py:282 - Cache Miss: 0 INFO LogCapture:monitor.py:283 - Page In: 0 INFO LogCapture:monitor.py:284 - Page Out: 0 INFO LogCapture:monitor.py:285 INFO LogCapture:monitor.py:286 INFO tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log INFO tests.suites.clu.dbmon_test:dbmon_test.py:179 Sanity check for --json output INFO LogCapture:monitor.py:245 { "date": "2021-06-04 22:16:23", "dbcache": { "hit_ratio": "100", "free": "486.73 MB", "free_percentage": "100.0", "roevicts": "0", "pagein": "0", "pageout": "0" }, "ndncache": { "hit_ratio": "0", "free": "19.98 MB", "free_percentage": "99.9", "count": "119",
"evictions": "0" }, "backends": { "userRoot": { "suffix": "dc=example,dc=com", "entry_cache_count": "5", "entry_cache_free": "1.31 GB", "entry_cache_free_percentage": "100.0", "entry_cache_size": "3.65 KB", "entry_cache_hit_ratio": "40", "dn_cache_count": "5", "dn_cache_free": "192.0 MB", "dn_cache_free_percentage": "100.0", "dn_cache_size": "67.0 B", "dn_cache_hit_ratio": "0", "indexes": [ { "name": "parentid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "uid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "id2entry.db", "cachehit": "8", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "objectclass.db", "cachehit": "14", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "nsuniqueid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "cn.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "ancestorid.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "numsubordinates.db", "cachehit": "0", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "aci.db", "cachehit": "3", "cachemiss": "0", "pagein": "0", "pageout": "0" }, { "name": "entryrdn.db", "cachehit": "16", "cachemiss": "0", "pagein": "0", "pageout": "0" } ] } } } INFO tests.suites.clu.dbmon_test:dbmon_test.py:133 Clear the log | |||
Passed | suites/clu/dbverify_test.py::test_dsctl_dbverify | 10.52 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ [04/Jun/2021:22:16:35.872343527 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dbverify_test:dbverify_test.py:63 Run dbverify INFO tests.suites.clu.dbverify_test:dbtasks.py:92 dbverify successful INFO tests.suites.clu.dbverify_test:dbverify_test.py:67 Check dbverify was successful -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dbverify_test:dbverify_test.py:33 Delete log file | |||
Passed | suites/clu/dsctl_acceptance_test.py::test_custom_path | 21.80 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/clu/dsidm_account_test.py::test_dsidm_account_entry_status_with_lock | 10.28 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:32 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:85 Test dsidm account entry-status INFO LogCapture:account.py:56 Entry DN: uid=test_user_1000,ou=people,dc=example,dc=com INFO LogCapture:account.py:62 Entry Creation Date: 20210605021709Z (2021-06-05 02:17:09) INFO LogCapture:account.py:62 Entry Modification Date: 20210605021709Z (2021-06-05 02:17:09) INFO LogCapture:account.py:63 Entry State: activated INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:89 Test dsidm account lock INFO LogCapture:account.py:109 Entry uid=test_user_1000,ou=people,dc=example,dc=com is locked INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:93 Test dsidm account entry-status with locked account INFO LogCapture:account.py:56 Entry DN: uid=test_user_1000,ou=people,dc=example,dc=com INFO LogCapture:account.py:62 Entry Creation Date: 20210605021709Z (2021-06-05 02:17:09) INFO LogCapture:account.py:62 Entry Modification Date: 20210605021709Z (2021-06-05 02:17:09) INFO LogCapture:account.py:63 Entry State: directly locked through nsAccountLock INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:97 Test dsidm account unlock INFO LogCapture:account.py:117 Entry uid=test_user_1000,ou=people,dc=example,dc=com is unlocked INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:101 Test dsidm account entry-status with unlocked account INFO LogCapture:account.py:56 Entry DN: uid=test_user_1000,ou=people,dc=example,dc=com INFO LogCapture:account.py:62 Entry Creation Date: 20210605021709Z (2021-06-05 02:17:09) INFO LogCapture:account.py:62 Entry Modification Date: 20210605021709Z (2021-06-05 02:17:09) INFO LogCapture:account.py:63 Entry State: activated -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_account_test:dsidm_account_test.py:37 Delete test user | |||
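Behind dsidm account lock/unlock is the nsAccountLock operational attribute, which is also what the "directly locked through nsAccountLock" state above refers to. The same toggle from lib389 looks approximately like this (inst is assumed to be an already-connected DirSrv instance; the lock()/unlock() helpers mirror what the CLI calls):

from lib389._constants import DEFAULT_SUFFIX
from lib389.idm.user import UserAccounts

users = UserAccounts(inst, DEFAULT_SUFFIX)
user = users.get('test_user_1000')
user.lock()    # sets nsAccountLock: true on the entry
user.unlock()  # removes nsAccountLock again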
Passed | suites/clu/dsidm_config_test.py::test_dsidm_config_sssd | 12.23 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307 ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. # ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. # ldap_access_filter = (memberOf=<dn>) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home # # sssd.conf # Generated by 389 Directory Server - dsidm # # For more details see man sssd.conf and man sssd-ldap # Be sure to review the content of this file to ensure it is secure and correct # in your environment. [domain/ldap] # Uncomment this for more verbose logging. # debug_level=3 # Cache hashes of user authentication for offline auth. cache_credentials = True id_provider = ldap auth_provider = ldap access_provider = ldap chpass_provider = ldap ldap_schema = rfc2307bis ldap_search_base = dc=example,dc=com ldap_uri = ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records, you can use the following instead. This derives # from your ldap_search_base. 
# ldap_uri = _srv_ ldap_tls_reqcert = demand # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs ldap_tls_cacertdir = /etc/openldap/certs # Path to the cacert # ldap_tls_cacert = /etc/openldap/certs/ca.crt # Only users who match this filter can login and authorise to this machine. Note # that users who do NOT match, will still have their uid/gid resolve, but they # can't login. ldap_access_filter = (memberOf=cn=new_group,ou=groups,dc=example,dc=com) enumerate = false access_provider = ldap ldap_user_member_of = memberof ldap_user_gecos = cn ldap_user_uuid = nsUniqueId ldap_group_uuid = nsUniqueId # This is really important as it allows SSSD to respect nsAccountLock ldap_account_expire_policy = rhds ldap_access_order = filter, expire # Setup for ssh keys # Inside /etc/ssh/sshd_config add the lines: # AuthorizedKeysCommand /usr/bin/sss_ssh_authorizedkeys # AuthorizedKeysCommandUser nobody # You can test with the command: sss_ssh_authorizedkeys <username> ldap_user_ssh_public_key = nsSshPublicKey # This prevents an issue where the Directory is recursively walked on group # and user look ups. It makes the client faster and more responsive in almost # every scenario. ignore_group_members = False [sssd] services = nss, pam, ssh, sudo config_file_version = 2 domains = ldap [nss] homedir_substring = /home -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:101 Create sssd.conf content DEBUG tests.suites.clu.dsidm_config_test:client_config.py:114 [sssd.conf content identical to the first configuration shown in stdout above] INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:104 Check if config creation was successful INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:107 Now we test allowed_group argument INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:108 Enable MemberOf plugin INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:113 Create test group INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:118 Create sssd.conf content with allowed group DEBUG tests.suites.clu.dsidm_config_test:client_config.py:114 [sssd.conf content identical to the second configuration shown in stdout above] INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:123 Check if config creation was successful INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:47 Check if content is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:37 Delete log file | |||
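Because the generated sssd.conf is INI-style, the values this test asserts on can also be sanity-checked with the standard library. A sketch under two assumptions: the generated file has been written to a path of your choosing (hypothetical below), and strict=False is set because the file repeats the access_provider key:

import configparser

cfg = configparser.ConfigParser(strict=False)
cfg.read('/tmp/sssd.conf')  # hypothetical location of the generated file
assert cfg['domain/ldap']['ldap_schema'] in ('rfc2307', 'rfc2307bis')
assert cfg['sssd']['domains'] == 'ldap'
print(cfg['domain/ldap']['ldap_uri'])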
Passed | suites/clu/dsidm_config_test.py::test_dsidm_config_ldap | 0.06 | |
------------------------------Captured stdout call------------------------------ # # OpenLDAP client configuration # Generated by 389 Directory Server - dsidm # # See ldap.conf(5) for details # This file should be world readable but not world writable. BASE dc=example,dc=com # Remember to check this: you can have multiple uris on this line. You may have # multiple servers or load balancers in your environment. URI ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records you can use: # URI ldaps:///dc%3Dexample%2Cdc%3Dcom DEREF never # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs TLS_CACERTDIR /etc/openldap/certs # TLS_CACERT /etc/openldap/certs/ca.crt -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:151 Create ldap.conf content DEBUG tests.suites.clu.dsidm_config_test:client_config.py:155 # # OpenLDAP client configuration # Generated by 389 Directory Server - dsidm # # See ldap.conf(5) for details # This file should be world readable but not world writable. BASE dc=example,dc=com # Remember to check this: you can have multiple uris on this line. You may have # multiple servers or load balancers in your environment. URI ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 # If you have DNS SRV records you can use: # URI ldaps:///dc%3Dexample%2Cdc%3Dcom DEREF never # To use cacert dir, place *.crt files in this path then run: # /usr/bin/openssl rehash /etc/openldap/certs # or (for older versions of openssl) # /usr/bin/c_rehash /etc/openldap/certs TLS_CACERTDIR /etc/openldap/certs # TLS_CACERT /etc/openldap/certs/ca.crt INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:154 Check if config creation was successful INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:37 Delete log file | |||
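Editor's note: for completeness, a rough sketch (not part of the test) of a client consuming the generated ldap.conf values with python-ldap; the URI and cacert directory are copied from the output above:

    import ldap

    conn = ldap.initialize('ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901')
    conn.set_option(ldap.OPT_X_TLS_CACERTDIR, '/etc/openldap/certs')
    conn.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND)
    conn.set_option(ldap.OPT_X_TLS_NEWCTX, 0)  # rebuild the TLS context with the options above
    conn.start_tls_s()                         # upgrade the plain connection before binding
    conn.simple_bind_s()                       # anonymous bind, just to prove connectivity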
Passed | suites/clu/dsidm_config_test.py::test_dsidm_config_display | 6.97 | |
------------------------------Captured stdout call------------------------------ # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, whereas modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application.
; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionally use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter based on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects.
basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, whereas modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionally use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter based on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:195 Test dsidm display option DEBUG tests.suites.clu.dsidm_config_test:client_config.py:290 # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection.
ldap_uri = ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, whereas modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control. user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionally use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter based on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values.
This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:198 Check if display option was successful INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:201 Enable MemberOf plugin INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:206 Test dsidm display option with MemberOf plugin DEBUG tests.suites.clu.dsidm_config_test:client_config.py:290 # This is a generic list of LDAP client configuration parameters you may require # for connecting a client to this server. Some of them may or may not apply # to your application, so consult your application documentation for further # assistance. # # This program makes a number of assumptions about your data and configuration # which may not be correct. Be sure to check these values for your situation. ; ldap uri ; This is the uri of the server you will connect to and authenticate to. It ; must be a valid subjectAltName in the presented TLS certificate. Note that this ; is not an exhaustive list of your LDAP servers, and other applications in your ; network like load balancers may affect this. This is just what we derive from ; your current connection. ldap_uri = ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 ; ldap dns discovery uri ; In some environments, you may have DNS SRV records such as ; "_ldap._tcp.<domain name>". If these are present in your dns server, you can ; use the following uri. ldap_uri = ldaps:///dc%3Dexample%2Cdc%3Dcom ; ca_cert ; To correctly use TLS, you require the valid CA cert that issued your LDAP TLS ; certificates. Sometimes a copy of this may be in your server instance as ca_cert = /etc/dirsrv/slapd-<instance>/ca.crt ; However that's not guaranteed. You can show the certs from the LDAP server ; by sshing to the server and running: certutil -L -d /etc/dirsrv/slapd-<instance>/ ; If you can identify the CA certificate name, you can then view it with: certutil -L -n <ca cert name> -a -d /etc/dirsrv/slapd-<instance>/ ; This should be a pem file you can use in your application's CA. ; Some applications don't require a ca certificate parameter, and will use the ; ca certificate from /etc/openldap/ldap.conf. You should configure ldap.conf ; in these cases. See the 'client_config ldap.conf' command in dsidm. ; basedn ; The basedn is the root suffix where all searches will originate from for ; LDAP objects. basedn = dc=example,dc=com ; schema_type ; LDAP servers have different ways to structure their objects and group ; relationships. Legacy servers will use rfc2307, whereas modern servers will ; use rfc2307bis (requires MemberOf plugin to be enabled). This is the schema ; setting of your directory based on your running configuration (if we can ; detect it). schema_type = rfc2307bis ; user/account basedn ; Some applications may optionally use a user/account basedn to limit searches ; in the directory. This can be for performance or security reasons. Generally ; you shouldn't need this, preferring to use groups and filters for access ; control.
user_basedn = ou=people,dc=example,dc=com ; user filter ; This is an ldap filter that will return only user objects. Additionally some ; applications will template into the filter (similar to sql statements) or they ; will generate the filter based on attributes. We list a number of possible ; filters you might use, but you should customise this for your application. ; ; If you are using rfc2307bis, you can use this filter to provide authorisation ; support by adding filters such as: (memberOf=<groupdn>) user_filter = (&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount)) user_filter = (&(&(objectclass=nsPerson)(objectclass=nsAccount)(objectclass=nsOrgPerson)(objectclass=posixAccount))(|(uid=<PARAM>)(displayName=<PARAM>)(cn=<PARAM>))) ; group basedn ; Some applications may optionally use a group basedn to limit searches in the ; directory. This can be for performance or security reasons. Generally you ; shouldn't need this, preferring to use groups and filters for access control. group_basedn = ou=Groups,dc=example,dc=com ; group filter ; This is an ldap filter that will return only group objects. Additionally ; some applications will template into the filter (similar to sql statements) ; or they will generate the filter based on attributes. We list a number of ; possible filters you might use, but you should customise this for your ; application. group_filter = (&(objectclass=groupOfNames)) group_filter = (&(&(objectclass=groupOfNames))(|(cn=<PARAM>))) ; attribute mappings ; Due to the variety of schemas and attribute mappings in LDAP, there are ; different representations of attributes and values. This is a guess at ; the mappings that exist in your server, and what attributes you should ; configure and use. unique id = nsUniqueId user rdn = uid user identifier = uid group rdn = cn group member attribute = member INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:209 Check if display option was successful with MemberOf plugin enabled INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:51 Check if content is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:56 Check if value is present in output INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:59 Reset log file for next test -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_config_test:dsidm_config_test.py:37 Delete log file | | |
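Editor's note: the user_filter lines above are templates; <PARAM> is meant to be substituted by the client application. A sketch of resolving one by hand with lib389 (the 'demo_user' value is illustrative, and inst is the assumed instance handle):

    from lib389._constants import DEFAULT_SUFFIX
    from lib389.idm.account import Accounts

    TEMPLATE = ('(&(&(objectclass=nsPerson)(objectclass=nsAccount)'
                '(objectclass=nsOrgPerson)(objectclass=posixAccount))'
                '(|(uid={v})(displayName={v})(cn={v})))')

    # Substitute the parameter, then subtree-search the suffix for matches
    matches = Accounts(inst, DEFAULT_SUFFIX).filter(TEMPLATE.format(v='demo_user'))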
Passed | suites/clu/dsidm_organizational_unit_test.py::test_dsidm_organizational_unit_delete | 10.14 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.clu.dsidm_organizational_unit_test:dsidm_organizational_unit_test.py:31 Create organizational unit -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_organizational_unit_test:dsidm_organizational_unit_test.py:72 Test dsidm organizationalunit delete INFO LogCapture._generic_delete:__init__.py:117 Successfully deleted ou=toDelete,dc=example,dc=com INFO tests.suites.clu.dsidm_organizational_unit_test:dsidm_organizational_unit_test.py:76 Check the entry is deleted -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_organizational_unit_test:dsidm_organizational_unit_test.py:39 Delete organizational unit | |||
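Editor's note: in lib389 terms, the create/delete pair this test drives through dsidm looks roughly like the following (the ou name matches the log above; fixture wiring omitted):

    from lib389._constants import DEFAULT_SUFFIX
    from lib389.idm.organizationalunit import OrganizationalUnits

    ous = OrganizationalUnits(inst, DEFAULT_SUFFIX)
    ou = ous.create(properties={'ou': 'toDelete'})
    ou.delete()  # a plain ldap delete; it would fail if the ou still had children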
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_list | 9.26 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:82 Empty the log file to prevent false data to check about user INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:85 Test dsidm user list without json INFO LogCapture._generic_list:__init__.py:82 demo_user INFO LogCapture._generic_list:__init__.py:82 test_user_1000 INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:89 Test dsidm user list with json INFO LogCapture._generic_list:__init__.py:84 { "type": "list", "items": [ "demo_user", "test_user_1000" ] } INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:94 Delete the user INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:99 Test empty dsidm user list with json INFO LogCapture._generic_list:__init__.py:84 { "type": "list", "items": [ "demo_user" ] } INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:103 Test empty dsidm user list without json INFO LogCapture._generic_list:__init__.py:82 demo_user -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
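Editor's note: the list output above (plain and JSON) maps onto a short lib389 call; a sketch, with the container default noted as an assumption:

    from lib389._constants import DEFAULT_SUFFIX
    from lib389.idm.user import UserAccounts

    users = UserAccounts(inst, DEFAULT_SUFFIX)  # searches under ou=people by default
    print(sorted(u.rdn for u in users.list()))  # e.g. ['demo_user', 'test_user_1000']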
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_get_rdn | 0.09 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:175 Empty the log file to prevent false data to check about user INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:178 Test dsidm user get without json INFO LogCapture._generic_get:__init__.py:96 dn: uid=test_user_1000,ou=people,dc=example,dc=com cn: test_user_1000 displayName: test_user_1000 gidNumber: 2000 homeDirectory: /home/test_user_1000 objectClass: top objectClass: nsPerson objectClass: nsAccount objectClass: nsOrgPerson objectClass: posixAccount uid: test_user_1000 uidNumber: 1000 INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:182 Test dsidm user get with json INFO LogCapture._generic_get:__init__.py:92 { "type": "entry", "dn": "uid=test_user_1000,ou=people,dc=example,dc=com", "attrs": { "objectclass": [ "top", "nsPerson", "nsAccount", "nsOrgPerson", "posixAccount" ], "uid": [ "test_user_1000" ], "cn": [ "test_user_1000" ], "displayname": [ "test_user_1000" ], "uidnumber": [ "1000" ], "gidnumber": [ "2000" ], "homedirectory": [ "/home/test_user_1000" ], "creatorsname": [ "cn=directory manager" ], "modifiersname": [ "cn=directory manager" ], "createtimestamp": [ "20210605021750Z" ], "modifytimestamp": [ "20210605021750Z" ], "nsuniqueid": [ "389c2e92-c5a411eb-8031f2df-89ec1e13" ], "parentid": [ "4" ], "entryid": [ "16" ], "entryuuid": [ "f0a32769-fbf9-4a31-97eb-33a5565784bc" ], "entrydn": [ "uid=test_user_1000,ou=people,dc=example,dc=com" ] } } -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
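Editor's note on the JSON shape logged above: attribute values always arrive as lists, even single-valued ones. A minimal consumer sketch ('output' is a hypothetical variable holding the captured JSON string):

    import json

    entry = json.loads(output)          # 'output': the JSON blob shown above
    assert entry['type'] == 'entry'
    uid = entry['attrs']['uid'][0]      # values are lists, even when single
    dn = entry['dn']                    # 'uid=test_user_1000,ou=people,dc=example,dc=com'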
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_create | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:248 Test dsidm user create INFO LogCapture._generic_create:__init__.py:110 Successfully created new_user INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:252 Check that user is present INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:257 Clean up for next test | |||
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_delete | 0.33 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:285 Test dsidm user delete INFO LogCapture._generic_delete:__init__.py:117 Successfully deleted uid=test_user_1000,ou=people,dc=example,dc=com INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:289 Check that user does not exist -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_modify | 0.36 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:318 Test dsidm user modify replace INFO LogCapture._generic_modify:__init__.py:337 Successfully modified uid=test_user_1000,ou=people,dc=example,dc=com INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:322 Test dsidm user modify add INFO LogCapture._generic_modify:__init__.py:337 Successfully modified uid=test_user_1000,ou=people,dc=example,dc=com INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:328 Test dsidm user modify delete INFO LogCapture._generic_modify:__init__.py:337 Successfully modified uid=test_user_1000,ou=people,dc=example,dc=com -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
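Editor's note: the three modify flavours exercised above (replace, add, delete) correspond directly to lib389's mutators; a sketch with illustrative attribute/value pairs, 'user' being a UserAccount handle:

    user.replace('displayName', 'New Name')        # MOD_REPLACE: swap existing values
    user.add('telephoneNumber', '+1 555 0100')     # MOD_ADD: append a value
    user.remove('telephoneNumber', '+1 555 0100')  # MOD_DELETE: drop a specific value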
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_rename_keep_old_rdn | 0.09 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:360 Test dsidm user rename INFO LogCapture._generic_rename:__init__.py:128 Successfully renamed to uid=my_user,ou=people,dc=example,dc=com INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:366 my_user should have uid attribute with the old rdn INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:371 Old user dn should not exist INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:374 Clean up -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
Passed | suites/clu/dsidm_user_test.py::test_dsidm_user_rename | 2.30 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:34 Create test user -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:403 Test dsidm user rename INFO LogCapture._generic_rename:__init__.py:128 Successfully renamed to uid=my_user,ou=people,dc=example,dc=com INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:410 New user should not have uid attribute with the old rdn INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:415 Old user dn should not exist. INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:418 Clean up -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.dsidm_user_test:dsidm_user_test.py:42 Delete test user | |||
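Editor's note: the two rename tests differ only in whether the old uid value survives as an attribute. A sketch, assuming lib389's rename() exposes the modrdn deleteoldrdn flag as the deloldrdn keyword (an assumption about the lib389 version shipped with these tests):

    user.rename('uid=my_user', deloldrdn=False)  # keep uid=test_user_1000 as an attribute
    user.rename('uid=my_user')                   # default: the old rdn value is removed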
Passed | suites/clu/dsrc_test.py::test_dsrc | 10.22 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO LogCapture:dsrc.py:264 [standalone1] INFO LogCapture:dsrc.py:266 basedn = dc=example,dc=com INFO LogCapture:dsrc.py:266 binddn = cn=Directory Manager INFO LogCapture:dsrc.py:267 INFO LogCapture:dsrc.py:264 [standalone1] INFO LogCapture:dsrc.py:266 basedn = dc=example,dc=com INFO LogCapture:dsrc.py:266 binddn = cn=Directory Manager INFO LogCapture:dsrc.py:267 INFO LogCapture:dsrc.py:264 [Second] INFO LogCapture:dsrc.py:266 basedn = o=second INFO LogCapture:dsrc.py:266 binddn = cn=Directory Manager INFO LogCapture:dsrc.py:267 INFO LogCapture:dsrc.py:264 [standalone1] INFO LogCapture:dsrc.py:266 basedn = dc=example,dc=com INFO LogCapture:dsrc.py:266 binddn = cn=Directory Manager INFO LogCapture:dsrc.py:267 INFO LogCapture:dsrc.py:264 [standalone1] INFO LogCapture:dsrc.py:266 basedn = o=different INFO LogCapture:dsrc.py:266 binddn = cn=Directory Manager INFO LogCapture:dsrc.py:267 INFO LogCapture:dsrc.py:264 [standalone1] INFO LogCapture:dsrc.py:266 binddn = cn=Directory Manager INFO LogCapture:dsrc.py:267 | |||
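Editor's note: the [standalone1] and [Second] blocks printed above are sections of an ini-style .dsrc file, so they round-trip through a plain ini parser. A sketch (the /root/.dsrc path is an assumption about where the file lives for the root user):

    from configparser import ConfigParser

    dsrc = ConfigParser()
    dsrc.read('/root/.dsrc')              # assumed location, adjust per user
    print(dsrc['standalone1']['basedn'])  # dc=example,dc=com
    print(dsrc['standalone1']['binddn'])  # cn=Directory Manager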
Passed | suites/clu/fixup_test.py::test_posix_winsync_fixup | 19.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.clu.fixup_test:fixup_test.py:73 Enable POSIXWinsyncPlugin INFO tests.suites.clu.fixup_test:fixup_test.py:77 Stopping the server and importing posix accounts INFO tests.suites.clu.fixup_test:fixup_test.py:87 Run Fixup task INFO tests.suites.clu.fixup_test:posix_winsync.py:29 Attempting to add task entry... INFO tests.suites.clu.fixup_test:posix_winsync.py:39 Successfully added task entry INFO tests.suites.clu.fixup_test:fixup_test.py:90 Check log if fixup task was successful -----------------------------Captured log teardown------------------------------ INFO tests.suites.clu.fixup_test:fixup_test.py:43 Delete files | |||
Passed | suites/clu/schema_test.py::test_origins_with_extra_parenthesis | 8.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/clu/schema_test.py::test_origins[attr1-99999.1-None] | 0.49 | |
No log output captured. | |||
Passed | suites/clu/schema_test.py::test_origins[attr2-99999.2-test-str] | 0.33 | |
No log output captured. | |||
Passed | suites/clu/schema_test.py::test_origins[attr3-99999.3-xorg2] | 0.23 | |
No log output captured. | |||
Passed | suites/clu/schema_test.py::test_origins[attr4-99999.4-test-tuple] | 1.46 | |
No log output captured. | |||
Passed | suites/config/autotuning_test.py::test_threads_basic | 7.85 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:39 Set nsslapd-threadnumber: -1 to enable autotuning INFO tests.suites.config.autotuning_test:autotuning_test.py:42 Assert nsslapd-threadnumber is equal to the documented expected value | |||
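Editor's note on the autotuning contract being tested: writing -1 asks the server to derive the thread count from the host's CPUs, and the readback returns the computed value rather than -1. A lib389 sketch (inst is the assumed instance handle):

    inst.config.set('nsslapd-threadnumber', '-1')  # enable autotuning
    autotuned = inst.config.get_attr_val_int('nsslapd-threadnumber')
    assert autotuned > 0                           # readback is the derived count, not -1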
Passed | suites/config/autotuning_test.py::test_threads_warning | 1.37 | |
No log output captured. | |||
Passed | suites/config/autotuning_test.py::test_threads_invalid_value[-2] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: -2. Operation should fail | |||
Passed | suites/config/autotuning_test.py::test_threads_invalid_value[0] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: 0. Operation should fail | |||
Passed | suites/config/autotuning_test.py::test_threads_invalid_value[invalid] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:87 Set nsslapd-threadnumber: invalid. Operation should fail | |||
Passed | suites/config/autotuning_test.py::test_threads_back_from_manual_value | 0.35 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:109 Set nsslapd-threadnumber: -1 to enable autotuning and save the new value INFO tests.suites.config.autotuning_test:autotuning_test.py:113 Set nsslapd-threadnumber to the autotuned value decreased by 2 INFO tests.suites.config.autotuning_test:autotuning_test.py:118 Set nsslapd-threadnumber: -1 to enable autotuning INFO tests.suites.config.autotuning_test:autotuning_test.py:121 Assert nsslapd-threadnumber is back to the autotuned value | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[-] | 5.79 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize INFO tests.suites.config.autotuning_test:autotuning_test.py:190 Delete nsslapd-cache-autosize-split INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'201326592' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'25' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[-0] | 4.31 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize INFO tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'201326592' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'0' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-400] | 5.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'201326592' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'261400428' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'469762048' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[-40] | 4.32 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'261400428' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'469762048' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' INFO tests.suites.config.autotuning_test:autotuning_test.py:180 Delete nsslapd-cache-autosize INFO tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'816876339' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'1140850688' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'134217728' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-] | 4.72 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'816876339' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'1140850688' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'134217728' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' INFO tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO tests.suites.config.autotuning_test:autotuning_test.py:190 Delete nsslapd-cache-autosize-split INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'163375267' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'603979776' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'25' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-401] | 4.88 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'163375267' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'603979776' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 40 INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'261400428' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'469762048' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_non_zero[10-0] | 4.93 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:169 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:170 nsslapd-dbcachesize == b'261400428' INFO tests.suites.config.autotuning_test:autotuning_test.py:171 nsslapd-cachememsize == b'469762048' INFO tests.suites.config.autotuning_test:autotuning_test.py:172 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:173 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:174 nsslapd-cache-autosize-split == b'40' INFO tests.suites.config.autotuning_test:autotuning_test.py:177 Set nsslapd-cache-autosize to 10 INFO tests.suites.config.autotuning_test:autotuning_test.py:187 Set nsslapd-cache-autosize-split to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:196 Trying to set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:199 Trying to set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:216 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:217 nsslapd-dbcachesize == b'163375267' INFO tests.suites.config.autotuning_test:autotuning_test.py:218 nsslapd-cachememsize == b'603979776' INFO tests.suites.config.autotuning_test:autotuning_test.py:219 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:220 nsslapd-cache-autosize == b'10' INFO tests.suites.config.autotuning_test:autotuning_test.py:221 nsslapd-cache-autosize-split == b'0' | |||
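Editor's note: several of the dbcachesize values above are internally consistent with a simple rule, roughly system-memory x autosize% x split%. A back-of-envelope check against three of the captured runs (the host-memory figure is inferred from the logs, not stated in them, and the deleted-autosize run does not fit this naive formula, so treat it as an approximation):

    # Infer host memory from the 25/25 run, then check two other runs.
    ram = 408438169 / (0.25 * 0.25)                  # ~6.5e9 bytes, inferred
    for autosize, split, logged in [(0.10, 0.40, 261400428),
                                    (0.10, 0.25, 163375267)]:
        assert abs(int(ram * autosize * split) - logged) <= 1  # rounding slack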
Passed | suites/config/autotuning_test.py::test_cache_autosize_basic_sane[0] | 8.89 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'163375267' INFO tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'603979776' INFO tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'0' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_basic_sane[] | 10.06 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:283 Delete nsslapd-cache-autosize-split INFO tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'408438169' INFO tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1409286144' INFO tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:283 Delete nsslapd-cache-autosize-split INFO tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'25' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_basic_sane[40] | 8.91 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'25' INFO tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 40 INFO tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 0 INFO tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'816876339' INFO tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'1140850688' INFO tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'40' INFO tests.suites.config.autotuning_test:autotuning_test.py:273 Check nsslapd-dbcachesize and nsslapd-cachememsize before the test INFO tests.suites.config.autotuning_test:autotuning_test.py:274 nsslapd-dbcachesize == b'816876339' INFO tests.suites.config.autotuning_test:autotuning_test.py:275 nsslapd-cachememsize == b'1140850688' INFO tests.suites.config.autotuning_test:autotuning_test.py:276 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:277 nsslapd-cache-autosize-split == b'40' INFO tests.suites.config.autotuning_test:autotuning_test.py:280 Set nsslapd-cache-autosize-split to 40 INFO tests.suites.config.autotuning_test:autotuning_test.py:289 Set nsslapd-dbcachesize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:291 Set nsslapd-cachememsize to 33333333 INFO tests.suites.config.autotuning_test:autotuning_test.py:307 Check nsslapd-dbcachesize and nsslapd-cachememsize in the appropriate range. INFO tests.suites.config.autotuning_test:autotuning_test.py:308 nsslapd-dbcachesize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:309 nsslapd-cachememsize == b'33333333' INFO tests.suites.config.autotuning_test:autotuning_test.py:310 nsslapd-dncachememsize == b'67108864' INFO tests.suites.config.autotuning_test:autotuning_test.py:311 nsslapd-cache-autosize == b'0' INFO tests.suites.config.autotuning_test:autotuning_test.py:312 nsslapd-cache-autosize-split == b'40' | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_invalid_values[-2] | 0.51 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to -2 INFO tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to -2 | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_invalid_values[102] | 0.37 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to 102 INFO tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to 102 | |||
Passed | suites/config/autotuning_test.py::test_cache_autosize_invalid_values[invalid] | 1.28 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.autotuning_test:autotuning_test.py:346 Set nsslapd-cache-autosize-split to invalid INFO tests.suites.config.autotuning_test:autotuning_test.py:352 Set nsslapd-cache-autosize to invalid | |||
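The three parametrized cases above (-2, 102, invalid) only log the attempted writes; the expected outcome is a server-side rejection. A hedged sketch of that negative check, with `inst` as before (the exact LDAP result code is an assumption, so the broad ldap.LDAPError is caught):

import ldap
import pytest
from lib389._mapped_object import DSLdapObject

ldbm = DSLdapObject(inst, 'cn=config,cn=ldbm database,cn=plugins,cn=config')

for bad in ('-2', '102', 'invalid'):
    # Both autosize attributes should refuse out-of-range or non-numeric input.
    with pytest.raises(ldap.LDAPError):
        ldbm.set('nsslapd-cache-autosize-split', bad)
    with pytest.raises(ldap.LDAPError):
        ldbm.set('nsslapd-cache-autosize', bad)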
Passed | suites/config/compact_test.py::test_compact_db_task | 23.27 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. | |||
Passed | suites/config/compact_test.py::test_compaction_interval_and_time | 10.05 | |
No log output captured. | |||
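For the two compact_test cases, a sketch of how the compaction task and schedule are typically driven from lib389; the DBCompactTask wrapper and the nsslapd-db-compactdb-time attribute are assumptions based on the feature under test, not quoted from the suite:

from lib389.tasks import DBCompactTask
from lib389._mapped_object import DSLdapObject

# One-shot compaction via cn=tasks (test_compact_db_task).
task = DBCompactTask(inst)
task.create()
task.wait()
assert task.get_exit_code() == 0

# Scheduled compaction (test_compaction_interval_and_time).
ldbm = DSLdapObject(inst, 'cn=config,cn=ldbm database,cn=plugins,cn=config')
ldbm.set('nsslapd-db-compactdb-interval', '2')   # seconds between checks
ldbm.set('nsslapd-db-compactdb-time', '00:01')   # time-of-day window
inst.restart()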
Passed | suites/config/config_delete_attr_test.py::test_delete_storagescheme | 8.65 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:42 Check we can modify passwordStorageScheme INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:46 Check removal of passwordStorageScheme is rejected | |||
Passed | suites/config/config_delete_attr_test.py::test_reset_attributes | 2.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:128 Change nsslapd-listenhost value to --> localhost INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:132 Now reset the attribute INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:135 nsslapd-listenhost is reset to None INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:128 Change nsslapd-securelistenhost value to --> localhost INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:132 Now reset the attribute INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:135 nsslapd-securelistenhost is reset to None INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:128 Change nsslapd-allowed-sasl-mechanisms value to --> GSSAPI INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:132 Now reset the attribute INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:135 nsslapd-allowed-sasl-mechanisms is reset to None INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:128 Change nsslapd-svrtab value to --> Some data INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:132 Now reset the attribute INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:135 nsslapd-svrtab is reset to None INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-localuser INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-defaultnamingcontext INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-accesslog INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-auditlog INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-errorlog INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-tmpdir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-rundir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-bakdir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-certdir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO 
tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-instancedir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-ldifdir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-lockdir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-schemadir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-workingdir INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-localhost INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-certmap-basedn INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-port INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-secureport INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-conntablesize INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-rootpw INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-hash-filters INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-requiresrestart INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-plugin INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-privatenamespaces INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! 
INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-allowed-to-delete-attrs INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-accesslog-list INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-auditfaillog-list INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-auditlog-list INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-errorlog-list INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-config INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-versionstring INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting objectclass INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting cn INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-backendconfig INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-betype INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! 
INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-connection-buffer INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-malloc-mmap-threshold INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-malloc-mxfast INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-malloc-trim-threshold INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-referralmode INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting nsslapd-saslpath INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:145 Change was rejected, test passed INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:138 Resetting passwordadmindn INFO tests.suites.config.config_delete_attr_test:config_delete_attr_test.py:148 This attribute isn't part of cn=config, so is already default! | |||
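The pattern behind this long log: optional cn=config attributes (nsslapd-listenhost, nsslapd-allowed-sasl-mechanisms, ...) can be deleted and read back as None, while core attributes (nsslapd-rootpw, nsslapd-port, ...) reject the delete. A minimal sketch of both branches, assuming `inst` as before:

import ldap

cfg = inst.config

# Deletable attribute: the value comes back as None after remove_all().
cfg.set('nsslapd-listenhost', 'localhost')
cfg.remove_all('nsslapd-listenhost')
assert cfg.get_attr_val_utf8('nsslapd-listenhost') is None

# Core attribute: the server refuses the delete outright.
try:
    cfg.remove_all('nsslapd-rootpw')
except ldap.LDAPError:
    pass  # rejected, matching "Change was rejected, test passed" above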
Passed | suites/config/config_test.py::test_nagle_default_value | 7.94 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:60 Check the value of nsslapd-nagle attribute is off by default | |||
Passed | suites/config/config_test.py::test_maxbersize_repl | 40.37 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4eeac9db-757f-4415-bbb2-4131c63dab9b / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 51e3fe33-3144-4a2d-8429-0f6a5e5caa53 / got description=4eeac9db-757f-4415-bbb2-4131c63dab9b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:89 Set nsslapd-maxbersize: 20K to supplier2 INFO tests.suites.config.config_test:config_test.py:94 Try to add attribute with a big value to supplier2 - expect to FAIL INFO tests.suites.config.config_test:config_test.py:101 Try to add attribute with a big value to supplier1 - expect to PASS INFO tests.suites.config.config_test:config_test.py:106 Check if a big value was successfully added to supplier1 INFO tests.suites.config.config_test:config_test.py:110 Check if a big value was successfully replicated to supplier2 | |||
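test_maxbersize_repl caps the BER size on supplier2 only, so an oversized write fails there directly but still arrives via replication from supplier1. A hedged sketch, assuming `supplier2` is the lib389 handle from the two-supplier topology above (the ldap.SERVER_DOWN outcome is an assumption; the log only names the steps):

import ldap
from lib389.idm.user import UserAccounts

supplier2.config.set('nsslapd-maxbersize', '20480')  # the "20K" from the log
supplier2.restart()

user = UserAccounts(supplier2, 'dc=example,dc=com').create_test_user()
try:
    user.set('description', 'x' * 25000)  # larger than 20480 bytes
except ldap.SERVER_DOWN:
    # An oversized BER element typically drops the connection; the exact
    # exception here is an assumption.
    pass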
Passed | suites/config/config_test.py::test_config_listen_backport_size | 0.08 | |
No log output captured. | |||
Passed | suites/config/config_test.py::test_config_deadlock_policy | 0.13 | |
No log output captured. | |||
Passed | suites/config/config_test.py::test_defaultnamingcontext | 0.73 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:240 Check the attribute nsslapd-defaultnamingcontext is present in cn=config INFO tests.suites.config.config_test:config_test.py:243 Delete nsslapd-defaultnamingcontext attribute INFO tests.suites.config.config_test:config_test.py:250 modify nsslapd-defaultnamingcontext with new suffix INFO tests.suites.config.config_test:config_test.py:253 Add new invalid value at runtime to nsslapd-defaultnamingcontext INFO tests.suites.config.config_test:config_test.py:257 Modify nsslapd-defaultnamingcontext with blank value INFO tests.suites.config.config_test:config_test.py:260 Add new suffix when nsslapd-defaultnamingcontext is empty INFO tests.suites.config.config_test:config_test.py:264 Check the value of the nsslapd-defaultnamingcontext automatically have the new suffix INFO tests.suites.config.config_test:config_test.py:267 Adding new suffix when nsslapd-defaultnamingcontext is not empty INFO tests.suites.config.config_test:config_test.py:271 Check the value of the nsslapd-defaultnamingcontext has not changed INFO tests.suites.config.config_test:config_test.py:274 Remove the newly added suffix and check the values of the attribute is not changed INFO tests.suites.config.config_test:config_test.py:278 Remove all the suffix at the end | |||
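The behaviour narrated above: nsslapd-defaultnamingcontext tracks a newly added suffix only while it is empty; once populated it stays put until changed explicitly. A sketch of the non-empty branch, with the backend name and suffix as illustrative values:

from lib389.backend import Backends

before = inst.config.get_attr_val_utf8('nsslapd-defaultnamingcontext')
Backends(inst).create(properties={'cn': 'newRoot',
                                  'nsslapd-suffix': 'dc=new,dc=com'})
# Adding a suffix while the attribute is non-empty must not change it.
assert inst.config.get_attr_val_utf8('nsslapd-defaultnamingcontext') == before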
Passed | suites/config/config_test.py::test_allow_add_delete_config_attributes | 4.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:328 Add a new valid attribute at runtime to cn=config INFO tests.suites.config.config_test:config_test.py:332 Delete nsslapd-listenhost to restore the default value INFO tests.suites.config.config_test:config_test.py:337 Add new invalid attribute at runtime to cn=config INFO tests.suites.config.config_test:config_test.py:341 Make sure the invalid attribute is not added | |||
Passed | suites/config/config_test.py::test_ignore_virtual_attrs | 0.67 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:374 Check the attribute nsslapd-ignore-virtual-attrs is present in cn=config INFO tests.suites.config.config_test:config_test.py:377 Check the default value of attribute nsslapd-ignore-virtual-attrs should be OFF INFO tests.suites.config.config_test:config_test.py:380 Set the valid values i.e. on/ON and off/OFF for nsslapd-ignore-virtual-attrs INFO tests.suites.config.config_test:config_test.py:385 Set invalid value for attribute nsslapd-ignore-virtual-attrs INFO tests.suites.config.config_test:config_test.py:396 Add cosPointer, cosTemplate and test entry to default suffix, where virtual attribute is postal code INFO tests.suites.config.config_test:config_test.py:409 Test if virtual attribute i.e. postal code shown in test entry while nsslapd-ignore-virtual-attrs: off INFO tests.suites.config.config_test:config_test.py:412 Set nsslapd-ignore-virtual-attrs=on INFO tests.suites.config.config_test:config_test.py:415 Test if virtual attribute i.e. postal code not shown while nsslapd-ignore-virtual-attrs: on | |||
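A compact version of the nsslapd-ignore-virtual-attrs checks above (default off, on/off accepted, junk rejected); the rejection is again caught broadly since the log does not state the result code:

import ldap
import pytest

assert inst.config.get_attr_val_utf8('nsslapd-ignore-virtual-attrs').lower() == 'off'
inst.config.set('nsslapd-ignore-virtual-attrs', 'on')
inst.config.set('nsslapd-ignore-virtual-attrs', 'off')
with pytest.raises(ldap.LDAPError):
    inst.config.set('nsslapd-ignore-virtual-attrs', 'invalid')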
Passed | suites/config/config_test.py::test_ndn_cache_enabled | 7.87 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.config_test:config_test.py:443 Check the attribute nsslapd-ndn-cache-enabled is present in cn=config INFO tests.suites.config.config_test:config_test.py:446 Check the attribute nsslapd-ndn-cache-enabled has the default value set as ON INFO tests.suites.config.config_test:config_test.py:449 Check the attribute nsslapd-ndn-cache-max-size is present in cn=config INFO tests.suites.config.config_test:config_test.py:455 Ticket#49593 : NDN cache stats should be under the global stats - Implemented in 1.4 INFO tests.suites.config.config_test:config_test.py:456 Fetch the monitor value according to the ds version INFO tests.suites.config.config_test:config_test.py:462 Check the backend monitor output for Normalized DN cache statistics, while nsslapd-ndn-cache-enabled is off INFO tests.suites.config.config_test:config_test.py:468 Check the backend monitor output for Normalized DN cache statistics, while nsslapd-ndn-cache-enabled is on INFO tests.suites.config.config_test:config_test.py:474 Set invalid value for nsslapd-ndn-cache-enabled INFO tests.suites.config.config_test:config_test.py:478 Set invalid value for nsslapd-ndn-cache-max-size | |||
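Since 1.4 the normalized DN cache counters sit under the global ldbm monitor (per the Ticket#49593 note above). A sketch of reading them; the monitor attribute names below are assumptions from memory of the ldbm monitor schema, not quoted from the suite:

from lib389._mapped_object import DSLdapObject

inst.config.set('nsslapd-ndn-cache-enabled', 'on')
monitor = DSLdapObject(inst, 'cn=monitor,cn=ldbm database,cn=plugins,cn=config')
print(monitor.get_attr_val_utf8('normalizeddncachetries'))
print(monitor.get_attr_val_utf8('normalizeddncachehits'))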
Passed | suites/config/config_test.py::test_require_index | 1.17 | |
No log output captured. | |||
Passed | suites/config/config_test.py::test_require_internal_index | 9.93 | |
No log output captured. | |||
Passed | suites/config/regression_test.py::test_maxbersize_repl | 17.33 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.config.regression_test:regression_test.py:100 Set nsslapd-errorlog-maxlogsize before nsslapd-errorlog-logmaxdiskspace INFO tests.suites.config.regression_test:regression_test.py:104 Assert no init_dse_file errors in the error log INFO tests.suites.config.regression_test:regression_test.py:108 Set nsslapd-errorlog-maxlogsize after nsslapd-errorlog-logmaxdiskspace INFO tests.suites.config.regression_test:regression_test.py:112 Assert no init_dse_file errors in the error log | |||
Passed | suites/config/removed_config_49298_test.py::test_restore_config | 11.22 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.config.removed_config_49298_test:removed_config_49298_test.py:43 /etc/dirsrv/slapd-standalone1 | |||
Passed | suites/config/removed_config_49298_test.py::test_removed_config | 2.53 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.config.removed_config_49298_test:removed_config_49298_test.py:72 /etc/dirsrv/slapd-standalone1 | |||
Passed | suites/cos/cos_test.py::test_positive | 10.63 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/cos/indirect_cos_test.py::test_indirect_cos | 12.63 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.cos.indirect_cos_test:indirect_cos_test.py:113 Add custom schema... INFO tests.suites.cos.indirect_cos_test:indirect_cos_test.py:126 Add test user... INFO tests.suites.cos.indirect_cos_test:indirect_cos_test.py:143 Setup indirect COS... ------------------------------Captured stdout call------------------------------ Successfully created subtree password policy -------------------------------Captured log call-------------------------------- INFO tests.suites.cos.indirect_cos_test:indirect_cos_test.py:163 Checking user... INFO tests.suites.cos.indirect_cos_test:indirect_cos_test.py:60 Create password policy for subtree ou=people,dc=example,dc=com INFO tests.suites.cos.indirect_cos_test:indirect_cos_test.py:170 Checking user... | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_verify_operation_when_disk_monitoring_is_off | 20.51 | |
-----------------------------Captured stdout setup------------------------------ Relabeled /var/log/dirsrv/slapd-standalone1 from unconfined_u:object_r:user_tmp_t:s0 to system_u:object_r:dirsrv_var_log_t:s0 -----------------------------Captured stderr setup------------------------------ chown: cannot access '/var/log/dirsrv/slapd-standalone1/*': No such file or directory -------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ 25+0 records in 25+0 records out 26214400 bytes (26 MB, 25 MiB) copied, 0.011516 s, 2.3 GB/s dd: error writing '/var/log/dirsrv/slapd-standalone1/foo1': No space left on device 10+0 records in 9+0 records out 10465280 bytes (10 MB, 10 MiB) copied, 0.00727469 s, 1.4 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_enable_external_libs_debug_log | 34.63 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00462268 s, 2.3 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_free_up_the_disk_space_and_change_ds_config | 4.38 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_verify_operation_with_nsslapd_disk_monitoring_logging_critical_off | 34.62 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00520915 s, 2.0 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_on_below_half_of_the_threshold | 25.58 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0154651 s, 2.1 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_setting_nsslapd_disk_monitoring_logging_critical_to_off | 3.54 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_off | 67.06 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00700604 s, 1.5 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_operation_with_nsslapd_disk_monitoring_logging_critical_off_below_half_of_the_threshold | 154.62 | |
------------------------------Captured stderr call------------------------------ 30+0 records in 30+0 records out 31457280 bytes (31 MB, 30 MiB) copied, 0.0157963 s, 2.0 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_go_straight_below_half_of_the_threshold | 107.86 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0204017 s, 1.6 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_readonly_on_threshold | 28.17 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00474342 s, 2.2 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_readonly_on_threshold_below_half_of_the_threshold | 49.35 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0174395 s, 1.9 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_below_half_of_the_threshold_not_starting_after_shutdown | 110.83 | |
------------------------------Captured stderr call------------------------------ 31+0 records in 31+0 records out 32505856 bytes (33 MB, 31 MiB) copied, 0.0133958 s, 2.4 GB/s -------------------------------Captured log call-------------------------------- INFO lib389:disk_monitoring_test.py:619 Instance start up has failed as expected | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_go_straight_below_4kb | 18.06 | |
------------------------------Captured stderr call------------------------------ 25+0 records in 25+0 records out 26214400 bytes (26 MB, 25 MiB) copied, 0.0111186 s, 2.4 GB/s dd: error writing '/var/log/dirsrv/slapd-standalone1/foo1': No space left on device 10+0 records in 9+0 records out 10170368 bytes (10 MB, 9.7 MiB) copied, 0.00561717 s, 1.8 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_threshold_to_overflow_value | 0.10 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_threshold_is_reached_to_half | 14.63 | |
------------------------------Captured stderr call------------------------------ 10+0 records in 10+0 records out 10485760 bytes (10 MB, 10 MiB) copied, 0.00492644 s, 2.1 GB/s | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold--2] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-9223372036854775808] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-2047] | 0.07 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-0] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold--1294967296] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-threshold-invalid] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-invalid] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-1] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-00] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-525 948] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period--10] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-logging-critical-oninvalid] | 0.06 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period--11] | 0.07 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_negagtive_parameterize[nsslapd-disk-monitoring-grace-period-01] | 0.07 | |
No log output captured. | |||
Passed | suites/disk_monitoring/disk_monitoring_test.py::test_valid_operations_are_permitted | 11.87 | |
No log output captured. | |||
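All of the disk_monitoring cases above exercise the same small set of knobs; every attribute name below appears in the parametrized test IDs, and only the values are illustrative:

inst.config.set('nsslapd-disk-monitoring', 'on')
inst.config.set('nsslapd-disk-monitoring-threshold', '2097152')         # bytes
inst.config.set('nsslapd-disk-monitoring-grace-period', '1')            # minutes
inst.config.set('nsslapd-disk-monitoring-logging-critical', 'off')
inst.config.set('nsslapd-disk-monitoring-readonly-on-threshold', 'on')
inst.restart()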
Passed | suites/disk_monitoring/disk_space_test.py::test_basic | 10.13 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:disk_space_test.py:37 Check that "partition", "size", "used", "available", "use%" words are present in the string INFO lib389:disk_space_test.py:41 Check that the sizes are numbers | |||
Passed | suites/ds_logs/ds_logs_test.py::test_check_default | 7.88 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- DEBUG tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:233 on | |||
Passed | suites/ds_logs/ds_logs_test.py::test_plugin_set_invalid | 0.24 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:252 test_plugin_set_invalid - Expect to fail with junk value | |||
Passed | suites/ds_logs/ds_logs_test.py::test_log_plugin_on | 4.81 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:279 Bug 1273549 - Check access logs for millisecond, when attribute is ON INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:280 perform any ldap operation, which will trigger the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 10 users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:284 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:287 parse the access logs -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:172 Removing all added users | |||
Passed | suites/ds_logs/ds_logs_test.py::test_log_plugin_off | 12.28 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:319 Bug 1273549 - Check access logs for missing millisecond, when attribute is OFF INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:321 test_log_plugin_off - set the configuration attribute to OFF INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:324 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:327 test_log_plugin_off - delete the previous access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 10 users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:334 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:337 check access log that microseconds are not present -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:172 Removing all added users | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_server_level_0 | 8.10 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:365 Set nsslapd-plugin-logging to on INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:368 Configure access log level to 0 INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:372 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:376 Check if access log does not contain internal log of MOD operation INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:382 Check if the other internal operations are not present -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:153 Stopping the instance INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:155 Deleting the access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:157 Starting the instance | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_server_level_4 | 9.13 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:414 Set nsslapd-plugin-logging to on INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:417 Configure access log level to 4 INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:421 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:426 Check if access log contains internal MOD operation in correct format INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:432 Check if the other internal operations have the correct format -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:153 Stopping the instance INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:155 Deleting the access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:157 Starting the instance | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_level_260 | 13.59 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:102 Enable automember plugin INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:106 Enable Referential Integrity plugin INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:110 Set nsslapd-plugin-logging to on INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:113 Restart the server INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:120 Configure access log level to 260 INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:86 Renaming user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:89 Delete the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:92 Delete automember entry, org. unit and group for the next test INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:472 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:476 Check the access logs for ADD operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:492 Check the access logs for MOD operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:506 Check the access logs for DEL operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:518 Check if the other internal operations have the correct format -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:126 Delete the previous access logs for the next test | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_level_131076 | 9.04 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:120 Configure access log level to 131076 INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:86 Renaming user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:89 Delete the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:92 Delete automember entry, org. unit and group for the next test INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:557 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:561 Check the access logs for ADD operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:577 Check the access logs for MOD operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:591 Check the access logs for DEL operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:603 Check if the other internal operations have the correct format -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:126 Delete the previous access logs for the next test | |||
Passed | suites/ds_logs/ds_logs_test.py::test_internal_log_level_516 | 9.45 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:120 Configure access log level to 516 INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:86 Renaming user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:89 Delete the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:92 Delete automember entry, org. unit and group for the next test INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:642 Restart the server to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:646 Check the access logs for ADD operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:663 Check the access logs for MOD operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:680 Check the access logs for DEL operation of the user INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:695 Check if the other internal operations have the correct format -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:126 Delete the previous access logs for the next test | |||
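The access log levels used above combine bitwise: 4 adds internal (plugin-initiated) operations to whatever else is enabled, which is why 260 (256 + 4), 131076 (131072 + 4) and 516 (512 + 4) all exercise the internal-operation format. A sketch of enabling and grepping for them with lib389's DirsrvAccessLog helper; the regex is an assumption about the "(Internal)" marker the suite checks:

from lib389.dirsrv_log import DirsrvAccessLog

inst.config.set('nsslapd-plugin-logging', 'on')
inst.config.set('nsslapd-accesslog-level', '260')  # 256 (default) + 4 (internal ops)
inst.restart()  # flush the logs, as the suite does

access_log = DirsrvAccessLog(inst)
internal_ops = access_log.match(r'.*\(Internal\).*')
assert internal_ops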
Passed | suites/ds_logs/ds_logs_test.py::test_access_log_truncated_search_message | 8.22 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:722 Make a search INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:725 Restart the server to flush the logs -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:153 Stopping the instance INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:155 Deleting the access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:157 Starting the instance | |||
Passed | suites/ds_logs/ds_logs_test.py::test_etime_order_of_magnitude | 6.12 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:811 add_users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 30 users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:814 search users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:817 parse the access logs to get the SRCH string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:825 get the operation start time from the SRCH string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:829 get the OP number from the SRCH string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:833 get the RESULT string matching the SRCH OP number INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:842 get the operation end time from the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:846 get the logged etime for the operation from the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:853 Calculate the ratio between logged etime for the operation and elapsed time from its start time to its end time - should be around 1 -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:172 Removing all added users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:153 Stopping the instance INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:155 Deleting the access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:157 Starting the instance | |||
Passed | suites/ds_logs/ds_logs_test.py::test_optime_and_wtime_keywords | 4.12 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:890 add_users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:36 Adding 30 users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:893 search users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:896 parse the access logs to get the SRCH string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:904 get the OP number from the SRCH string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:908 get the RESULT string matching the SRCH op number INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:915 Search for the wtime keyword in the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:918 get the wtime value from the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:921 Search for the optime keyword in the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:924 get the optime value from the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:927 get the etime value from the RESULT string INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:930 Check that (wtime + optime) is approximately equal to etime i.e. their ratio is 1 -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:172 Removing all added users INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:153 Stopping the instance INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:155 Deleting the access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:157 Starting the instance | |||
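Both timing tests reduce to the same arithmetic on a RESULT line: etime should roughly match the wall-clock span of the operation, and wtime + optime should roughly equal etime. A self-contained sketch over a hypothetical RESULT line in the current format:

import re

line = ('RESULT err=0 tag=101 nentries=30 '
        'wtime=0.000087348 optime=0.000466104 etime=0.000553011')

def field(name):
    return float(re.search(rf'{name}=(\d+\.\d+)', line).group(1))

wtime, optime, etime = field('wtime'), field('optime'), field('etime')
# The ratio should be about 1; the tolerance is illustrative.
assert 0.9 < (wtime + optime) / etime < 1.1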
Passed | suites/ds_logs/ds_logs_test.py::test_log_base_dn_when_invalid_attr_request | 3.67 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:200 Disable access log buffering -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:962 delete the previous access logs to get a fresh new one INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:965 Search the default suffix, with invalid '"" ""' attribute request INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:966 A Protocol error exception should be raised, see https://github.com/389ds/389-ds-base/issues/3028 INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:972 Check the access logs for correct messages -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:203 Enable access log buffering | |||
Passed | suites/ds_logs/ds_logs_test.py::test_audit_log_rotate_and_check_string | 12.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1020 Doing modifications to rotate audit log INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1025 Doing one more modification just in case INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1032 Check that DS string is present on first line INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1036 Check that DS string is present only once -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:187 Setting audit log config back to default values INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:153 Stopping the instance INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:155 Deleting the access logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:157 Starting the instance | |||
Passed | suites/ds_logs/ds_logs_test.py::test_enable_external_libs_debug_log | 18.86 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1074 Create a user to bind on INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1086 Set nsslapd-external-libs-debug-enabled to "on" INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1089 Clean the error log INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1092 Bind as the user to generate OpenLDAP output INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1095 Restart the servers to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1098 Check the error log for OpenLDAP debug log INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1101 Set nsslapd-external-libs-debug-enabled to "off" INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1104 Clean the error log INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1107 Bind as the user to generate OpenLDAP output INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1110 Restart the servers to flush the logs INFO tests.suites.ds_logs.ds_logs_test:ds_logs_test.py:1113 Check the error log for OpenLDAP debug log | |||
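A sketch of the nsslapd-external-libs-debug-enabled round trip; deleteErrorLogs/searchErrorsLog are lib389 DirSrv helpers used here on the assumption they exist in this version, and the 'libldap' marker is an assumption about what the OpenLDAP debug output contains:

from lib389.idm.user import UserAccounts

user = UserAccounts(inst, 'dc=example,dc=com').create_test_user()
user.set('userPassword', 'password')

inst.config.set('nsslapd-external-libs-debug-enabled', 'on')
inst.deleteErrorLogs()
user.bind('password')   # generates OpenLDAP library output
inst.restart()          # flush the logs
assert inst.searchErrorsLog('libldap')  # marker string is an assumption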
Passed | suites/ds_logs/regression_test.py::test_default_loglevel_stripped[24576] | 7.95 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/ds_logs/regression_test.py::test_default_loglevel_stripped[16512] | 0.27 | |
No log output captured. | |||
Passed | suites/ds_logs/regression_test.py::test_default_loglevel_stripped[16385] | 0.95 | |
No log output captured. | |||
Passed | suites/ds_logs/regression_test.py::test_dse_config_loglevel_error | 12.93 | |
No log output captured. | |||
Passed | suites/ds_tools/logpipe_test.py::test_user_permissions | 10.29 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.ds_tools.logpipe_test:logpipe_test.py:32 Add system test user - dirsrv_testuser -------------------------------Captured log call-------------------------------- INFO tests.suites.ds_tools.logpipe_test:logpipe_test.py:68 Try to create a logpipe in the log directory with "-u" option specifying the user -----------------------------Captured log teardown------------------------------ INFO tests.suites.ds_tools.logpipe_test:logpipe_test.py:41 Delete system test user - dirsrv_testuser | |||
Passed | suites/ds_tools/replcheck_test.py::test_state | 57.94 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 69127cf3-3ca1-4d4f-b928-eba3388dfaf8 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 849a7e6d-0e67-4e57-b6bf-680da1a23804 / got description=69127cf3-3ca1-4d4f-b928-eba3388dfaf8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect e25622e0-6b34-455c-8951-bec51999945f / got description=849a7e6d-0e67-4e57-b6bf-680da1a23804) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO tests.suites.ds_tools.replcheck_test:replcheck_test.py:101 Export LDAPTLS_CACERTDIR env variable for ds-replcheck INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 9723e2d2-a37f-4145-af3a-0ecbcfb75043 / got description=e25622e0-6b34-455c-8951-bec51999945f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 1542cd2f-967c-4ef8-a958-a109db2fd5ac / got description=9723e2d2-a37f-4145-af3a-0ecbcfb75043) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working | |||
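For readers decoding the setup log above: it creates two suppliers, joins them into a replication topology, then polls until a marker write propagates in each direction, which is what produces the paired "Retry ... is NOT working" / "SUCCESS ... is working" lines. A minimal sketch of that flow, assuming lib389's create_topology and ReplicationManager.wait_for_replication helpers named in the log (nothing suite-specific):

    from lib389.topologies import create_topology
    from lib389.replica import ReplicationManager
    from lib389._constants import ReplicaRole, DEFAULT_SUFFIX

    # Two suppliers, matching the "Completed installation for supplier1/supplier2" lines.
    topo = create_topology({ReplicaRole.SUPPLIER: 2})
    m1, m2 = topo.ms["supplier1"], topo.ms["supplier2"]

    repl = ReplicationManager(DEFAULT_SUFFIX)
    # Writes a marker value on one side and polls the other until it arrives,
    # retrying until the expected description value shows up.
    repl.wait_for_replication(m1, m2)
    repl.wait_for_replication(m2, m1)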
Passed | suites/ds_tools/replcheck_test.py::test_check_ruv | 17.25 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_supplier1.ldif ldiffile: /tmp/export_supplier2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_missing_entries | 19.03 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_supplier1.ldif ldiffile: /tmp/export_supplier2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_tombstones | 21.18 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_supplier1.ldif ldiffile: /tmp/export_supplier2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_conflict_entries | 28.82 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_supplier1.ldif ldiffile: /tmp/export_supplier2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_inconsistencies | 22.46 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_supplier1.ldif ldiffile: /tmp/export_supplier2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_suffix_exists | 0.38 | |
No log output captured. | |||
Passed | suites/ds_tools/replcheck_test.py::test_check_missing_tombstones | 18.44 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/export_supplier1.ldif ldiffile: /tmp/export_supplier2.ldif | |||
Passed | suites/ds_tools/replcheck_test.py::test_dsreplcheck_with_password_file | 0.08 | |
No log output captured. | |||
Passed | suites/ds_tools/replcheck_test.py::test_dsreplcheck_timeout_connection_mechanisms | 5.19 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.ds_tools.replcheck_test:replcheck_test.py:545 Run ds-replcheck with -t option | |||
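As a rough illustration of what the last two replcheck tests drive: the log above names only the -t option, and the password file comes from test_dsreplcheck_with_password_file; the subcommand, remaining flag spellings, host, ports, file path, and timeout value are assumptions, not taken from the report. A hedged invocation sketch:

    import subprocess

    subprocess.run([
        "ds-replcheck", "online",
        "-b", "dc=example,dc=com",          # replicated suffix
        "-D", "cn=Directory Manager",       # bind DN
        "-y", "/tmp/replcheck_pw.txt",      # password file (hypothetical path)
        "-m", "ldaps://localhost:63701",    # first supplier URL (placeholder)
        "-r", "ldaps://localhost:63702",    # second supplier URL (placeholder)
        "-t", "5",                          # connection timeout, the -t option above
    ], check=True)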
Passed | suites/dynamic_plugins/notice_for_restart_test.py::test_notice_when_dynamic_not_enabled | 14.66 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/entryuuid/basic_test.py::test_entryuuid_indexed_import_and_search | 12.37 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/entryuuid/basic_test.py::test_entryuuid_unindexed_import_and_search | 4.13 | |
No log output captured. | |||
Passed | suites/entryuuid/basic_test.py::test_entryuuid_generation_on_add | 0.07 | |
------------------------------Captured stdout call------------------------------ fb7c52bd-f333-4c93-ac3d-9cb4f027e5e9 | |||
Passed | suites/entryuuid/basic_test.py::test_entryuuid_fixup_task | 22.82 | |
No log output captured. | |||
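The fixup test above drives the server-side entryUUID fixup task. A hedged sketch of doing the same through lib389 (the local_simple_allocate/open handshake and the plugin's fixup()/wait()/get_exit_code() helpers are assumed from lib389's usual task conventions; credentials are placeholders):

    from lib389 import DirSrv
    from lib389.plugins import EntryUUIDPlugin
    from lib389._constants import DEFAULT_SUFFIX

    # Connect to the standalone1 instance from the setup log.
    inst = DirSrv()
    inst.local_simple_allocate(serverid="standalone1", password="password")
    inst.open()

    plug = EntryUUIDPlugin(inst)
    task = plug.fixup(DEFAULT_SUFFIX)   # queue the entryUUID fixup task on the server
    task.wait()                         # poll until the task entry reports completion
    assert task.get_exit_code() == 0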
Passed | suites/entryuuid/replicated_test.py::test_entryuuid_with_replication | 37.92 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 31564716-1fd5-46c5-a0b2-2d6d68d36df1 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e58e8cce-052d-4689-b357-b97f3dd43ae7 / got description=31564716-1fd5-46c5-a0b2-2d6d68d36df1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stdout call------------------------------ 🧩 ['ebc1f2c9-39ec-4d6d-9ec6-8d0e38b9fd0f'] 🧩 ['ebc1f2c9-39ec-4d6d-9ec6-8d0e38b9fd0f'] 🧩 ['ebc1f2c9-39ec-4d6d-9ec6-8d0e38b9fd0f'] -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 82679e5b-c655-43fb-907a-b3309a292944 / got description=e58e8cce-052d-4689-b357-b97f3dd43ae7) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 82679e5b-c655-43fb-907a-b3309a292944 / got description=e58e8cce-052d-4689-b357-b97f3dd43ae7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 24b454c7-fa0b-433d-8aea-40096f59783a / got description=82679e5b-c655-43fb-907a-b3309a292944) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/export/export_test.py::test_dbtasks_db2ldif_with_non_accessible_ldif_file_path | 12.73 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ ldiffile: /tmp/nonexistent/export.ldif -------------------------------Captured log call-------------------------------- INFO lib389.utils:export_test.py:63 Stopping the instance... INFO lib389.utils:export_test.py:66 Performing an offline export to a non-accessible ldif file path - should fail properly CRITICAL LogCapture:dbtasks.py:41 db2ldif failed INFO lib389.utils:export_test.py:33 checking output msg INFO lib389.utils:export_test.py:38 Clear the log INFO lib389.utils:export_test.py:70 parsing the errors log to search for the error reported INFO lib389.utils:export_test.py:79 Restarting the instance... | |||
Passed | suites/export/export_test.py::test_db2ldif_cli_with_non_accessible_ldif_file_path | 5.07 | |
------------------------------Captured stdout call------------------------------ db2ldif failed ------------------------------Captured stderr call------------------------------ ldiffile: /tmp/nonexistent/export.ldif -------------------------------Captured log call-------------------------------- INFO lib389.utils:export_test.py:104 Stopping the instance... INFO lib389.utils:export_test.py:107 Performing an offline export to a non-accessible ldif file path - should fail properly INFO lib389.utils:export_test.py:115 db2ldif failed properly: error (1) INFO lib389.utils:export_test.py:118 parsing the errors log to search for the error reported INFO lib389.utils:export_test.py:121 error string : '[04/Jun/2021:22:44:13.265402440 -0400] - ERR - bdb_db2ldif - db2ldif: userRoot: can\'t open /tmp/nonexistent/export.ldif: 2 (Unexpected dbimpl error code) while running as user "dirsrv"\n', '[04/Jun/2021:22:44:18.284800306 -0400] - ERR - bdb_db2ldif - db2ldif: userRoot: can\'t open /tmp/nonexistent/export.ldif: 2 (Unexpected dbimpl error code) while running as user "dirsrv"\n' INFO lib389.utils:export_test.py:123 Restarting the instance... | |||
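Both export tests above share one shape: stop the instance, request an offline db2ldif export to a path that cannot be created, and assert the failure is reported cleanly (the CRITICAL line and "error (1)" above). A sketch under the assumption that DirSrv.db2ldif takes roughly these keyword arguments and returns a success flag; credentials are placeholders:

    from lib389 import DirSrv
    from lib389._constants import DEFAULT_SUFFIX

    inst = DirSrv()
    inst.local_simple_allocate(serverid="standalone1", password="password")
    inst.open()

    inst.stop()                       # offline export requires a stopped server
    ok = inst.db2ldif(bename="userRoot",
                      suffixes=[DEFAULT_SUFFIX],
                      excludeSuffixes=None,
                      encrypt=False,
                      repl_data=False,
                      outputfile="/tmp/nonexistent/export.ldif")
    assert not ok                     # /tmp/nonexistent does not exist, so db2ldif must fail
    inst.start()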
Passed | suites/filter/basic_filter_test.py::test_search_attr | 10.75 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/bitw_filter_test.py::test_bitwise_plugin_status | 9.68 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/bitw_filter_test.py::test_search_disabled_accounts | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_plugin_can_be_disabled | 3.66 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_plugin_is_disabled | 0.07 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_enabling_works_fine | 4.61 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=513))-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=16777216))-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=8388608))-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=5))-3] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=8))-3] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=7))-5] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testerperson) (testUserAccountControl:1.2.840.113556.1.4.804:=7))-0] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.803:=98536)(testUserAccountControl:1.2.840.113556.1.4.803:=912)))-0] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.804:=87)(testUserAccountControl:1.2.840.113556.1.4.804:=91)))-8] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (&(testUserAccountControl:1.2.840.113556.1.4.803:=89536)(testUserAccountControl:1.2.840.113556.1.4.804:=79)))-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (|(testUserAccountControl:1.2.840.113556.1.4.803:=89536)(testUserAccountControl:1.2.840.113556.1.4.804:=79)))-8] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (|(testUserAccountControl:1.2.840.113556.1.4.803:=89)(testUserAccountControl:1.2.840.113556.1.4.803:=536)))-0] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=x))-13] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=&\*#$%))-13] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-65536))-0] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-1))-0] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=-))-13] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=))-13] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=\*))-13] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.804:=\*))-0] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=6552))-0] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson\))(testUserAccountControl:1.2.840.113556.1.4.804:=6552))-0] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_all_together[(& (objectclass=testperson) (testUserAccountControl:1.2.840.113556.1.4.803:=65536))-5] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries | 0.55 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries1 | 0.33 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries3 | 0.10 | |
No log output captured. | |||
Passed | suites/filter/bitw_filter_test.py::test_5_entries4 | 1.71 | |
No log output captured. | |||
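The OIDs running through the bitw_filter_test parametrizations above are the standard bitwise matching rules: 1.2.840.113556.1.4.803 matches when every bit of the asserted integer is set in the attribute value (bitwise AND), and 1.2.840.113556.1.4.804 matches when at least one bit is set (bitwise OR). An illustrative python-ldap search using one of the filters above; the host, port, and credentials are assumptions based on the setup logs:

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")
    conn.simple_bind_s("cn=Directory Manager", "password")

    # Bitwise AND: entries whose testUserAccountControl has all bits of 513 (0x201) set.
    flt = "(&(objectclass=testperson)(testUserAccountControl:1.2.840.113556.1.4.803:=513))"
    entries = conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE, flt)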
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(sn=last1)(givenname=first1))-1] | 8.68 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(sn=last1)(givenname=first1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(sn=last1)(givenname=first1)))-1] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(sn=last1)(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=first1))))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=first1))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(sn=last3)(givenname=*))-1] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(sn=last3)(givenname=*))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(&(sn=last3)(givenname=*)))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(&(sn=last3)(givenname=*)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid5)(&(&(sn=*))(&(givenname=*))))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid5)(&(&(sn=*))(&(givenname=*))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(objectclass=*)(uid=*)(sn=last*))-5] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(objectclass=*)(uid=*)(sn=last*))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(objectclass=*)(uid=*)(sn=last1))-1] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(objectclass=*)(uid=*)(sn=last1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(sn=last1)(givenname=first1))-1] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(sn=last1)(givenname=first1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(|(sn=last1)(givenname=first1)))-1] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(|(sn=last1)(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=uid1)(|(|(sn=last1))(|(givenname=first1))))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=uid1)(|(|(sn=last1))(|(givenname=first1))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(objectclass=*)(sn=last1)(|(givenname=first1)))-18] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(objectclass=*)(sn=last1)(|(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last1))(|(givenname=first1)))-1] | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last1))(|(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last))(|(givenname=first1)))-1] | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last))(|(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(!(cn=NULL)))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(!(cn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(!(cn=NULL))(uid=uid1))-1] | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(!(cn=NULL))(uid=uid1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(&(!(uid=1))(!(givenname=first1))))-5] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(&(!(uid=1))(!(givenname=first1))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(uid=NULL))(sn=last1))-1] | 0.24 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(uid=NULL))(sn=last1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(uid=NULL))(!(sn=NULL)))-1] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(uid=NULL))(!(sn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(|(uid=uid1)(sn=last2))(givenname=first1))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(|(uid=uid1)(sn=last2))(givenname=first1))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(uid=uid1)(!(uid=NULL)))(sn=last2))-2] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid=uid1)(!(uid=NULL)))(sn=last2))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(uid=uid1)(uid=NULL))(sn=last2))-1] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid=uid1)(uid=NULL))(sn=last2))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid5)(sn=*)(cn=*)(givenname=*)(uid=u*)(sn=la*)(cn=full*)(givenname=f*)(uid>=u)(!(givenname=NULL)))-1] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid5)(sn=*)(cn=*)(givenname=*)(uid=u*)(sn=la*)(cn=full*)(givenname=f*)(uid>=u)(!(givenname=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(objectclass=*)(sn=last))(&(givenname=first1)))-1] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(objectclass=*)(sn=last))(&(givenname=first1)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(sn=last1)(givenname=NULL))-0] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(sn=last1)(givenname=NULL))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(sn=last1)(givenname=NULL)))-0] | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(sn=last1)(givenname=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL))))-0] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL)(sn=*)))(|(sn=NULL)))-0] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last1))(&(givenname=NULL)(sn=*)))(|(sn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=uid1)(&(&(sn=last*))(&(givenname=first*)))(&(sn=NULL)))-0] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=uid1)(&(&(sn=last*))(&(givenname=first*)))(&(sn=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(sn=NULL)(givenname=NULL))-0] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(sn=NULL)(givenname=NULL))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(|(sn=NULL)(givenname=NULL)))-0] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(|(sn=NULL)(givenname=NULL)))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=NULL)(|(|(sn=NULL))(|(givenname=NULL))))-0] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=NULL)(|(|(sn=NULL))(|(givenname=NULL))))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*))-6] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*)(uid=*))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(uid>=uid3)-3] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(uid>=uid3)"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid=*)(uid>=uid3))-3] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid=*)(uid>=uid3))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(uid>=uid3)(uid<=uid5))-6] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(uid>=uid3)(uid<=uid5))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(&(uid>=uid3)(uid<=uid5))-3] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(&(uid>=uid3)(uid<=uid5))"... | |||
Passed | suites/filter/complex_filters_test.py::test_filters[(|(&(uid>=uid3)(uid<=uid5))(uid=*))-6] | 1.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.filter.complex_filters_test:complex_filters_test.py:130 Testing filter "(|(&(uid>=uid3)(uid<=uid5))(uid=*))"... | |||
Passed | suites/filter/filter_cert_test.py::test_positive | 34.99 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index0] | 9.30 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index2] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index3] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index4] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index5] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index6] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index7] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index8] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index9] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index10] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index11] | 0.35 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index12] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index13] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index14] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index15] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index16] | 0.18 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_valid_invalid_attributes[index17] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod0] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod2] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod3] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod4] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod5] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod6] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod7] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod8] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod9] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod10] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod11] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod12] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod13] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod14] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod15] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod16] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods[mod17] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode0] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode1] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode2] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode3] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode4] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode5] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode6] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode7] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode8] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode9] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode10] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode11] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode12] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode13] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode14] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode15] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode16] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_replace[mode17] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode0] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode2] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode3] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode4] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode5] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode6] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode7] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode8] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode9] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode10] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode11] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode12] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode13] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode14] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode15] | 0.34 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode16] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_mods_delete[mode17] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_search_positive_negative | 2.15 | |
No log output captured. | |||
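The test_do_extensible_search rows below all use RFC 4515 extensible-match syntax, (attribute:matchingrule:=value), which applies an explicitly named matching rule (equality, ordering, and so on) instead of the attribute type's default rule. A minimal sketch of one such search; the host, port, and credentials are assumptions, and the trailing number in each row id is read here as the expected match count:

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")
    conn.simple_bind_s("cn=Directory Manager", "password")

    # Apply bitStringMatch explicitly to the attrbitStringMatch attribute.
    flt = "(attrbitStringMatch:bitStringMatch:='0001'B)"
    entries = conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE, flt)
    assert len(entries) == 1   # the matching row below carries a "-1" suffix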
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrbitStringMatch:bitStringMatch:='0001'B)-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match:caseExactIA5Match:=Sprain)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch:caseExactOrderingMatch:=ÇélIné Ändrè)-5] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeMatch:=20100218171300Z)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeOrderingMatch:=20100218171300Z)-6] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrbooleanMatch:booleanMatch:=TRUE)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match:caseIgnoreIA5Match:=sprain1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch:caseIgnoreMatch:=ÇélIné Ändrè1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch:caseIgnoreOrderingMatch:=ÇélIné Ändrè1)-6] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch:caseIgnoreListMatch:=foo1$bar)-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrobjectIdentifierMatch:objectIdentifierMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrdirectoryStringFirstComponentMatch:directoryStringFirstComponentMatch:=ÇélIné Ändrè1)-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrobjectIdentifierFirstComponentMatch:objectIdentifierFirstComponentMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrdistinguishedNameMatch:distinguishedNameMatch:=cn=foo1,cn=bar)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerMatch:integerMatch:=-2)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerMatch:integerOrderingMatch:=-2)-6] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrintegerFirstComponentMatch:integerFirstComponentMatch:=-2)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attruniqueMemberMatch:uniqueMemberMatch:=cn=foo1,cn=bar#'0001'B)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-10] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-11] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch:telephoneNumberMatch:=+1 408 555 4798)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attroctetStringMatch:octetStringMatch:=AAAAAAAAAAAAAAE=)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attroctetStringMatch:octetStringOrderingMatch:=AAAAAAAAAAAAAAE=)-6] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=ÇélIné Ändrè*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactMatch=*é Ä*)-5] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*Sprain*)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=Sprain*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*Sprain)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseExactIA5Match=*rai*)-3] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*sprain1*)-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=sprain1*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*sprain1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreIA5Match=*rai*)-6] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=ÇélIné Ändrè1*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreMatch=*é Ä*)-6] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=foo1$bar*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrcaseIgnoreListMatch=*1$b*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*00001*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=00001*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*00001)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrnumericStringMatch=*000*)-6] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=+1 408 555 4798*)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_index_match_test.py::test_do_extensible_search[(attrtelephoneNumberMatch=* 55*)-6] | 1.05 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=people))(|(ou=nothing2)(ou=nothing3)))] | 9.32 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=people)(ou=nothing1))(|(ou=nothing2)(ou=nothing3)))] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=nothing2))(|(ou=people)(ou=nothing3)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(|(|(ou=nothing1)(ou=nothing2))(|(ou=nothing3)(ou=people)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(&(sn<=0000000000000000)(givenname>=FFFFFFFFFFFFFFFF))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(&(sn>=0000000000000000)(sn<=1111111111111111))] | 0.06 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_positive[(&(sn>=0000000000000000)(givenname<=FFFFFFFFFFFFFFFF))] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_schema | 3.36 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(uidNumber=18446744073709551617)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(gidNumber=18446744073709551617)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(MYINTATTR=18446744073709551617)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(uidNumber=*)(!(uidNumber=18446744073709551617)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(gidNumber=*)(!(gidNumber=18446744073709551617)))] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(uidNumber=*)(!(gidNumber=18446744073709551617)))] | 0.25 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(&(myintattr=*)(!(myintattr=18446744073709551617)))] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(uidNumber>=-18446744073709551617)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(gidNumber>=-18446744073709551617)] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(uidNumber<=18446744073709551617)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(gidNumber<=18446744073709551617)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing[(myintattr<=18446744073709551617)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber=54321)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber=54321)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr=54321)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber<=-999999999999999999999999999999)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber<=-999999999999999999999999999999)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr<=-999999999999999999999999999999)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(gidNumber>=999999999999999999999999999999)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(uidNumber>=999999999999999999999999999999)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_indexing_test.py::test_indexing_negative[(myintattr>=999999999999999999999999999999)] | 2.44 | |
No log output captured. | |||
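A note on the magic numbers in the filter_indexing_test rows above: 18446744073709551617 is 2**64 + 1, one past the unsigned 64-bit boundary, its negation sits just outside the signed range, and the ±999999999999999999999999999999 bounds are far beyond both, so these filters exercise how integer indexes handle out-of-range assertion values. A quick arithmetic check:

    # 2**64 == 18446744073709551616, so the test value is one past the 64-bit boundary.
    assert 2**64 + 1 == 18446744073709551617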
Passed | suites/filter/filter_logic_test.py::test_eq | 8.79 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_logic_test.py::test_sub | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_not_eq | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_ranges | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_eq | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_range | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_allid_shortcut | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_eq | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_not_eq | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_not_eq | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_range | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_range | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_and_eq | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_or_eq | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_and_or_eq | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_logic_test.py::test_or_and_eq | 1.11 | |
No log output captured. | |||
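Note: the filter_logic cases above walk the boolean combinators (equality, substring, ranges, and nested AND/OR/NOT, including the all-IDs shortcut). Compound filters of this shape are easiest to build with small helpers; a self-contained sketch (the helper names are ours, not lib389's):

    import ldap.filter

    def f_and(*parts):
        return "(&%s)" % "".join(parts)

    def f_or(*parts):
        return "(|%s)" % "".join(parts)

    def f_not(part):
        return "(!%s)" % part

    def f_eq(attr, value):
        # escape_filter_chars guards values against filter injection
        return "(%s=%s)" % (attr, ldap.filter.escape_filter_chars(value))

    # An AND-NOT combination like the test_and_not_eq case above:
    print(f_and(f_eq("uid", "user1"), f_not(f_eq("cn", "user2"))))
    # -> (&(uid=user1)(!(cn=user2)))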
Passed | suites/filter/filter_match_test.py::test_matching_rules | 8.87 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_match_test.py::test_add_attribute_types | 1.71 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule0] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule2] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule3] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule4] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule5] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule6] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule7] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule8] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule9] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule10] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule11] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule12] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule13] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule14] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule15] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule16] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_attributes[rule17] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode0] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode1] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode2] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode3] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode4] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode5] | 0.35 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode6] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode7] | 0.35 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode8] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode9] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode10] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode11] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode12] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode13] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode14] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode15] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode16] | 0.35 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_modes[mode17] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode0] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode1] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode2] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode3] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode4] | 0.36 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode5] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode6] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode7] | 0.40 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode8] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode9] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode10] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode11] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode12] | 0.15 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode13] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode14] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode15] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode16] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_valid_invalid_mode_replace[mode17] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrbitStringMatch='0001'B)-1-(attrbitStringMatch:bitStringMatch:='000100000'B)] | 1.64 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrgeneralizedTimeMatch=20100218171300Z)-1-(attrcaseExactIA5Match=SPRAIN)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseExactMatch>=ÇélIné Ändrè)-5-(attrcaseExactMatch=ÇéLINé ÄNDRè)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1-(attrcaseExactMatch>=çéliné ändrè)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseExactIA5Match=Sprain)-1-(attrgeneralizedTimeMatch=20300218171300Z)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrbooleanMatch=TRUE)-1-(attrgeneralizedTimeMatch>=20300218171300Z)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreIA5Match=sprain1)-1-(attrcaseIgnoreIA5Match=sprain9999)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreMatch=ÇélIné Ändrè1)-1-(attrcaseIgnoreMatch=ÇélIné Ändrè9999)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreMatch>=ÇélIné Ändrè1)-6-(attrcaseIgnoreMatch>=ÇélIné Ändrè9999)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrcaseIgnoreListMatch=foo1$bar)-1-(attrcaseIgnoreListMatch=foo1$bar$baz$biff)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrobjectIdentifierMatch=1.3.6.1.4.1.1466.115.121.1.15)-1-(attrobjectIdentifierMatch=1.3.6.1.4.1.1466.115.121.1.15.99999)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrgeneralizedTimeMatch>=20100218171300Z)-6-(attroctetStringMatch>=AAAAAAAAAAABAQQ=)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrdirectoryStringFirstComponentMatch=ÇélIné Ändrè1)-1-(attrdirectoryStringFirstComponentMatch=ÇélIné Ändrè9999)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrobjectIdentifierFirstComponentMatch=1.3.6.1.4.1.1466.115.121.1.15)-1-(attrobjectIdentifierFirstComponentMatch=1.3.6.1.4.1.1466.115.121.1.15.99999)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrdistinguishedNameMatch=cn=foo1,cn=bar)-1-(attrdistinguishedNameMatch=cn=foo1,cn=bar,cn=baz)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrintegerMatch=-2)-1-(attrintegerMatch=-20)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrintegerMatch>=-2)-6-(attrintegerMatch>=20)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrintegerFirstComponentMatch=-2)-1-(attrintegerFirstComponentMatch=-20)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attruniqueMemberMatch=cn=foo1,cn=bar#'0001'B)-1-(attruniqueMemberMatch=cn=foo1,cn=bar#'00010000'B)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrnumericStringMatch=00001)-1-(attrnumericStringMatch=000000001)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrnumericStringMatch>=00001)-6-(attrnumericStringMatch>=01)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attrtelephoneNumberMatch=+1 408 555 4798)-1-(attrtelephoneNumberMatch=+2 408 555 4798)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attroctetStringMatch=AAAAAAAAAAAAAAE=)-1-(attroctetStringMatch=AAAAAAAAAAAAAAEB)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_match_count[(attroctetStringMatch>=AAAAAAAAAAAAAAE=)-6-(attroctetStringMatch>=AAAAAAAAAAABAQE=)] | 0.09 | |
No log output captured. | |||
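Note: each test_match_count id packs three pieces of data: a filter, the number of entries it must match, and a second filter that must match nothing. For example, (attrintegerMatch=-2)-1-(attrintegerMatch=-20) asserts one hit for the first filter and zero for the second. A sketch of the same assertion (connection details hypothetical):

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical port
    conn.simple_bind_s("cn=Directory Manager", "password")

    def count(flt):
        # "1.1" requests no attributes; only the entry count matters here
        return len(conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                                 flt, ["1.1"]))

    # Mirrors test_match_count[(attrintegerMatch=-2)-1-(attrintegerMatch=-20)]
    assert count("(attrintegerMatch=-2)") == 1
    assert count("(attrintegerMatch=-20)") == 0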
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrbitStringMatch:bitStringMatch:='0001'B)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match:caseExactIA5Match:=Sprain)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch:caseExactMatch:=ÇélIné Ändrè)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch:caseExactOrderingMatch:=ÇélIné Ändrè)-5] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeMatch:=20100218171300Z)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrgeneralizedTimeMatch:generalizedTimeOrderingMatch:=20100218171300Z)-6] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrbooleanMatch:booleanMatch:=TRUE)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match:caseIgnoreIA5Match:=sprain1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch:caseIgnoreMatch:=ÇélIné Ändrè1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch:caseIgnoreOrderingMatch:=ÇélIné Ändrè1)-6] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch:caseIgnoreListMatch:=foo1$bar)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrobjectIdentifierMatch:objectIdentifierMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrdirectoryStringFirstComponentMatch:directoryStringFirstComponentMatch:=ÇélIné Ändrè1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrobjectIdentifierFirstComponentMatch:objectIdentifierFirstComponentMatch:=1.3.6.1.4.1.1466.115.121.1.15)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrdistinguishedNameMatch:distinguishedNameMatch:=cn=foo1,cn=bar)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrintegerMatch:integerMatch:=-2)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrintegerMatch:integerOrderingMatch:=-2)-6] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrintegerFirstComponentMatch:integerFirstComponentMatch:=-2)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attruniqueMemberMatch:uniqueMemberMatch:=cn=foo1,cn=bar#'0001'B)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-10] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch:numericStringMatch:=00001)-11] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch:telephoneNumberMatch:=+1 408 555 4798)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attroctetStringMatch:octetStringMatch:=AAAAAAAAAAAAAAE=)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attroctetStringMatch:octetStringOrderingMatch:=AAAAAAAAAAAAAAE=)-6] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè*)-1] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=ÇélIné Ändrè*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*ÇélIné Ändrè)-1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactMatch=*é Ä*)-5] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*Sprain*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=Sprain*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*Sprain)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseExactIA5Match=*rai*)-3] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*sprain1*)-1] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=sprain1*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*sprain1)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreIA5Match=*rai*)-6] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1*)-1] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=ÇélIné Ändrè1*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*ÇélIné Ändrè1)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreMatch=*é Ä*)-6] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=foo1$bar*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*foo1$bar)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrcaseIgnoreListMatch=*1$b*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*00001*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=00001*)-1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*00001)-1] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrnumericStringMatch=*000*)-6] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=+1 408 555 4798*)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=*+1 408 555 4798)-1] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_match_test.py::test_extensible_search[(attrtelephoneNumberMatch=* 55*)-6] | 1.54 | |
No log output captured. | |||
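Note: the test_extensible_search cases use the RFC 4515 extensible-match form (attribute:matchingRule:=value), which evaluates the assertion under the named rule instead of the attribute's default equality rule; judging by the expected counts, an ordering rule here behaves like the corresponding >= comparison (compare the caseExactOrderingMatch count of 5 with the plain >= count in the test_match_count rows above). A sketch of issuing one such search (connection details hypothetical):

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical port
    conn.simple_bind_s("cn=Directory Manager", "password")

    # Force integerOrderingMatch rather than the default integerMatch rule.
    flt = "(attrintegerMatch:integerOrderingMatch:=-2)"
    entries = conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                            flt, ["1.1"])
    print(len(entries))  # the row above expects 6 matches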
Passed | suites/filter/filter_test.py::test_filter_escaped | 8.76 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO tests.suites.filter.filter_test:filter_test.py:42 Running test_filter_escaped...
INFO tests.suites.filter.filter_test:filter_test.py:78 test_filter_escaped: PASSED | |||
Passed | suites/filter/filter_test.py::test_filter_search_original_attrs | 0.09 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.filter.filter_test:filter_test.py:95 Running test_filter_search_original_attrs...
INFO tests.suites.filter.filter_test:filter_test.py:108 test_filter_search_original_attrs: PASSED | |||
Passed | suites/filter/filter_test.py::test_filter_scope_one | 0.11 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.filter.filter_test:filter_test.py:125 Search user using ldapsearch with scope one
INFO tests.suites.filter.filter_test:filter_test.py:127 [dn: ou=services,dc=example,dc=com ou: services ]
INFO tests.suites.filter.filter_test:filter_test.py:129 Search should only have one entry | |||
Passed | suites/filter/filter_test.py::test_filter_with_attribute_subtype | 0.17 | |
-------------------------------Captured log call--------------------------------
INFO lib389:filter_test.py:154 Bind as cn=Directory Manager
INFO lib389:filter_test.py:161 ######################### ADD ######################
INFO lib389:filter_test.py:184 Try to add Add cn=test_entry both, dc=example,dc=com: dn: cn=test_entry both, dc=example,dc=com cn: test_entry both cn;en: test_entry en cn;fr: test_entry fr objectclass: top objectclass: person sn: test_entry both
INFO lib389:filter_test.py:187 Try to add Add cn=test_entry en only, dc=example,dc=com: dn: cn=test_entry en only, dc=example,dc=com cn: test_entry en only cn;en: test_entry en objectclass: top objectclass: person sn: test_entry en only
INFO lib389:filter_test.py:190 ######################### SEARCH ######################
INFO lib389:filter_test.py:194 Try to search with filter (&(sn=test_entry en only)(!(cn=test_entry fr)))
INFO lib389:filter_test.py:198 Found cn=test_entry en only,dc=example,dc=com
INFO lib389:filter_test.py:202 Try to search with filter (&(sn=test_entry en only)(!(cn;fr=test_entry fr)))
INFO lib389:filter_test.py:206 Found cn=test_entry en only,dc=example,dc=com
INFO lib389:filter_test.py:210 Try to search with filter (&(sn=test_entry en only)(!(cn;en=test_entry en)))
INFO lib389:filter_test.py:213 Found none
INFO lib389:filter_test.py:215 ######################### DELETE ######################
INFO lib389:filter_test.py:217 Try to delete cn=test_entry both, dc=example,dc=com
INFO lib389:filter_test.py:220 Try to delete cn=test_entry en only, dc=example,dc=com
INFO tests.suites.filter.filter_test:filter_test.py:223 Testcase PASSED | |||
Passed | suites/filter/filter_test.py::test_extended_search | 1.68 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.filter.filter_test:filter_test.py:250 Running test_filter_escaped...
INFO lib389:filter_test.py:267 Try to search with filter (cn:de:=ext-test-entry)
INFO lib389:filter_test.py:273 Try to search with filter (cn:caseIgnoreIA5Match:=EXT-TEST-ENTRY)
INFO lib389:filter_test.py:279 Try to search with filter (cn:caseIgnoreMatch:=EXT-TEST-ENTRY)
INFO lib389:filter_test.py:285 Try to search with filter (cn:caseExactMatch:=EXT-TEST-ENTRY)
INFO lib389:filter_test.py:291 Try to search with filter (cn:caseExactMatch:=ext-test-entry)
INFO lib389:filter_test.py:297 Try to search with filter (cn:caseExactIA5Match:=EXT-TEST-ENTRY)
INFO lib389:filter_test.py:303 Try to search with filter (cn:caseExactIA5Match:=ext-test-entry) | |||
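Note: the test_filter_with_attribute_subtype log above shows language-tagged attribute options (cn;en, cn;fr): an untagged filter such as (cn=...) matches values under any tag, while (cn;fr=...) matches only the French-tagged value. A sketch of creating and querying such an entry with python-ldap (connection details and password hypothetical):

    import ldap
    import ldap.modlist

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical port
    conn.simple_bind_s("cn=Directory Manager", "password")

    # Entry mirroring the captured log: one cn value per language tag.
    entry = {
        "objectClass": [b"top", b"person"],
        "sn": [b"test_entry both"],
        "cn": [b"test_entry both"],
        "cn;en": [b"test_entry en"],
        "cn;fr": [b"test_entry fr"],
    }
    dn = "cn=test_entry both,dc=example,dc=com"
    conn.add_s(dn, ldap.modlist.addModlist(entry))

    # Tagged filter: matches only the cn;fr value.
    print(conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                        "(cn;fr=test_entry fr)", ["1.1"]))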
Passed | suites/filter/filter_with_non_root_user_test.py::test_telephone[(telephonenumber=*7393)] | 14.32 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_telephone[(telephonenumber=*408*3)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=mward)] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=sunnyvale)0] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(mail=jreu*)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(mail=*exam*)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=*)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1:=>AAA)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:es:=>AAA)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.5:=AAA)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1:=>user100)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:es:=>user100)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.5:=user100)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.1:=user1)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.15.1.1:=z)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=user1)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid<=Z)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=1)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=A)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid>=user20)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.2:=user20)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.2:=z)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=A)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=A)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.4:=A)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=user20)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=user20)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1.4:=user20)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:2.16.840.1.113730.3.3.2.18.1:=>=z)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid:fr:=>=z)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(description=This is the special * attribute value)] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(description=*x*)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=ptyler)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=*wal*)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=0312)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(uid=mw*)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=2295)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=Cupertino)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(objectclass=inetorgperson)] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=sunnyvale)1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=200)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=201)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(roomNumber=202)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(l=*)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(sn~=tiller))(!(uid=ptyler)))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(sn~=tiller)) (uid=ptyler))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (roomNumber=0312))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=*wal*))(!(roomNumber=0312)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=*wal*))(roomNumber=0312))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(!(roomNumber=0312)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*)(|(sn~=tiller) (roomNumber=2295)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(&(uid=*wal*) (roomNumber=2295))(&(uid=*wal*) (sn~=tiller)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))0] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(|(uid=*wal*) (sn~=tiller))(|(uid=*wal*) (roomNumber=2295)))0] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*) (roomNumber=2295))0] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(roomNumber=2295) (uid=*wal*))0] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(roomNumber=2295) (uid=*wal*))0] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))1] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (roomNumber=2295))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(uid=*wal*) (l=*))] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino) (|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino)(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295))))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(|(uid=*wal*)(&(sn~=tiller) (roomNumber=2295)))))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user1))(objectclass=inetorgperson))] | 0.36 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user1))(objectclass=inetorgperson))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(mail=cnewport@example.com))(l=sunnyvale))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(|(uid=*wal*) (sn~=tiller))(|(uid=*wal*) (roomNumber=2295)))1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(uid=*wal*) (roomNumber=2295))1] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(roomNumber=2295) (uid=*wal*))1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(roomNumber=2295) (uid=*wal*))1] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(roomNumber=2254))(&(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=2254))(&(!(roomNumber=200))(!(roomNumber=201))(!(roomNumber=202))(l=sunnyvale)))] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.14 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(& (objectclass=inetorgperson)(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(| (objectclass=inetorgperson)(!(uid=user1))(!(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20))(!(uid=user30)))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=sunnyvale)(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.13 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=sunnyvale)(!(roomNumber=4012))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(!(|(!(l=*))(!(l=sunnyvale))))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=*))(!(l=sunnyvale)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(!(l=Cupertino))(mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(!(l=Cupertino))(mail=*exam*) (|(uid=*wal*) (l=*)))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino)(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(|(l=Cupertino) (mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_positive[(&(l=Cupertino)(!(mail=*exam*))(|(uid=*wal*) (l=*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(userpassword=*)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(fred=*)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<1)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<1)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1.1:=1)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<user1)] | 0.33 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<user1)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.15.1:=<z)] | 0.34 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:es:=<z)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid<=1)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid<=A)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid>=Z)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=A)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=A)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1.2:=A)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=user20)] | 0.33 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=user20)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1:=<=z)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:fr:=<=z)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid:2.16.840.1.113730.3.3.2.18.1.4:=z)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(sn~=tiller)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(givenName~=pricella)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(mail=cnewport@example.com)] | 0.34 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user20)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user30)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(uid=user40)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(sn~=tiller) (givenName~=pricella))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(sn~=tiller)(!(uid=ptyler)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(!(l=Cupertino))(mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino)(!(mail=*exam*))(!(|(uid=*wal*) (l=*))))] | 0.12 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino) (mail=*exam*)(!(|(uid=*wal*) (l=*))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/filter_with_non_root_user_test.py::test_all_negative[(&(l=Cupertino) (mail=*exam*) (|(uid=*wal*) (l=*)))] | 2.70 | |
No log output captured. | |||
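Note: the filter_with_non_root_user suite repeats its searches under an ordinary bind rather than cn=Directory Manager; the _negative cases must return nothing, either because no entry matches or because default ACIs hide the attribute, (userpassword=*) being the classic example. A sketch of that check (user DN and password hypothetical):

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical port
    # Bind as a regular user, not as Directory Manager.
    conn.simple_bind_s("uid=user1,ou=people,dc=example,dc=com", "password")

    res = conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                        "(userpassword=*)", ["1.1"])
    # Default ACIs deny reading userPassword to ordinary binds, so the
    # filter matches no visible entry even though passwords are set.
    assert res == []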
Passed | suites/filter/filterscanlimit_test.py::test_invalid_configuration | 9.27 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/filterscanlimit_test.py::test_idlistscanlimit | 11.67 | |
No log output captured. | |||
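Note: filterscanlimit_test drives the backend ID-list scan limit, which caps how many candidate entry IDs one index lookup may collect before the term degrades to an all-IDs scan. In 389-ds this is the nsslapd-idlistscanlimit attribute on the ldbm database config entry; a sketch of adjusting it (DN as in stock layouts, value illustrative):

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical port
    conn.simple_bind_s("cn=Directory Manager", "password")

    # Backend-wide limit; individual indexes can also override it.
    conn.modify_s("cn=config,cn=ldbm database,cn=plugins,cn=config",
                  [(ldap.MOD_REPLACE, "nsslapd-idlistscanlimit", [b"4000"])])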
Passed | suites/filter/large_filter_test.py::test_large_filter[(&(objectClass=person)(|(manager=uid=fmcdonnagh,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_0,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_1,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_2,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_3,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_4,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_5,dc=anuj,dc=com)(manager=uid=jvedder, dc=anuj, dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_6,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_7,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_8,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_9,dc=anuj,dc=com)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_10,dc=anuj,dc=com)(manager=uid=cnewport, dc=anuj, dc=com)))] | 11.09 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/large_filter_test.py::test_large_filter[(&(objectClass=person)(|(manager=uid=fmcdonnagh *)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_0,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_1,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_2,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_3,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_4,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_5,*)(manager=uid=jvedder,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_6,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_7,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_8,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_9,*)(manager=cn=no_such_entry_with_a_really_long_dn_component_to_stress_the_filter_handling_code_10,*)(manager=uid=cnewport,*)))] | 2.76 | |
No log output captured. | |||
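Note: the large_filter cases stress filter parsing with a wide OR of manager assertions, most pointing at deliberately long, nonexistent DNs. Generating such a filter programmatically keeps the test data manageable; a self-contained sketch that approximates the first parametrized filter (term order simplified):

    # Rebuild the kind of wide OR filter used in the large_filter rows above.
    stem = ("cn=no_such_entry_with_a_really_long_dn_component_to_stress_"
            "the_filter_handling_code_%d,dc=anuj,dc=com")
    terms = ["(manager=uid=fmcdonnagh,dc=anuj,dc=com)"]
    terms += ["(manager=%s)" % (stem % i) for i in range(11)]
    terms += ["(manager=uid=jvedder, dc=anuj, dc=com)",
              "(manager=uid=cnewport, dc=anuj, dc=com)"]
    flt = "(&(objectClass=person)(|%s))" % "".join(terms)
    print(len(flt), flt[:80], "...")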
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_supported_features | 8.61 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-] | 0.43 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-*] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-objectClass] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-] | 0.34 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-*] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-objectClass] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-*] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-objectClass] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-*] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-objectClass] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-*] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-objectClass] | 0.32 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-*] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-objectClass] | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:158 bound as: uid=all_attrs_test,ou=people,dc=example,dc=com | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-*] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
Passed | suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-objectClass] | 1.80 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:rfc3673_all_oper_attrs_test.py:161 bound as: cn=Directory Manager | |||
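The test_search_basic cases above exercise RFC 3673: requesting the special attribute "+" returns all operational attributes, either alone or together with "*" (all user attributes) or a named attribute such as objectClass, and the parametrization also varies whether the bind is cn=Directory Manager or the uid=all_attrs_test user. A minimal python-ldap sketch of such a search follows; the URL and credentials are illustrative assumptions, not values from this run:

import ldap

conn = ldap.initialize("ldap://localhost:389")          # assumed URL
conn.simple_bind_s("cn=Directory Manager", "password")  # assumed credentials

# "+" alone returns only operational attributes (RFC 3673); adding "*"
# returns the user attributes as well, mirroring the parametrized cases.
for dn, attrs in conn.search_s("ou=people,dc=example,dc=com",
                               ldap.SCOPE_SUBTREE,
                               "(uid=all_attrs_test)",
                               ["+", "*"]):
    print(dn, sorted(attrs))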
Passed | suites/filter/schema_validation_test.py::test_filter_validation_config | 8.86 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_enabled | 3.98 | |
No log output captured. | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_warn_safe | 0.17 | |
No log output captured. | |||
Passed | suites/filter/schema_validation_test.py::test_filter_validation_warn_unsafe | 2.25 | |
No log output captured. | |||
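The four schema_validation_test cases toggle the server's filter/schema verification behaviour between enabled, warn-on-safe and warn-on-unsafe modes. A hedged sketch of the kind of cn=config change involved; the attribute name nsslapd-verify-filter-schema and the value shown are assumptions inferred from the test names, not confirmed by this report:

import ldap

conn = ldap.initialize("ldap://localhost:389")          # assumed URL
conn.simple_bind_s("cn=Directory Manager", "password")  # assumed credentials

# Assumed setting: switch filter-schema checking to a "warn but still
# process safe filters" mode before searching with an unknown attribute.
conn.modify_s("cn=config",
              [(ldap.MOD_REPLACE, "nsslapd-verify-filter-schema",
                [b"process-safe"])])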
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition0-cn] | 11.61 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition1-cn] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition2-cn] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition3-cn] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition4-modifiersName] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition5-modifyTimestamp] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition6-modifiersName] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition7-modifyTimestamp] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition8-modifiersName] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition9-modifyTimestamp] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition10-cn] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition11-cn] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition12-modifiersName] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition13-nsRoleDN] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition14-cn] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition15-modifiersName] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))-condition16-nsRoleDN] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition17-mailquota] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition18-mailquota] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition19-mailquota] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition20-mailquota] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(uid=rjense2)-condition21-nsRoleDN] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(nsRoleDN=cn=new managed *)-condition22-cn] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(nsRoleDN=cn=new managed *)-condition23-nsRoleDN] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition24-mailquota] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition25-nsRoleDN] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition26-mailquota] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition27-modifiersName] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition28-nsRoleDN] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition29-nsRoleDN] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_attribute_test.py::test_all_together_positive[(& (nsRoleDN=cn=new managed *) (uid=mtyler))-condition30-modifiersName] | 1.53 | |
No log output captured. | |||
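The vfilter_attribute_test cases above pair one compound filter with numbered condition fixtures and a single expected attribute (cn, modifiersName, modifyTimestamp, nsRoleDN, mailquota). A hypothetical reduction of that pattern, with the conn fixture and the assertion shape assumed rather than taken from the suite source:

import ldap
import pytest

FILTER = "(& (| (nsRoleDN=cn=new managed role) (sn=Hall)) (l=sunnyvale))"

@pytest.mark.parametrize("attr", ["cn", "modifiersName", "modifyTimestamp"])
def test_filter_returns_attr(conn, attr):  # 'conn' fixture is assumed
    results = conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                            FILTER, [attr])
    assert results  # the filter should match at least one entry
    for _dn, attrs in results:
        # every matching entry should carry the requested attribute
        assert attr.lower() in {a.lower() for a in attrs}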
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode=99999)] | 11.30 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalAddress=345 California Av., Mountain View, CA)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode:2.16.840.1.113730.3.3.2.7.1:=88888)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalCode:2.16.840.1.113730.3.3.2.7.1.3:=66666)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass=vpe*)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass=*emai*)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota=*00)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota=*6*0)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(nsRole=*)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(postalAddress=*)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1:=>AAA)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:es:=>AAA)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1.5:=AAA)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:2.16.840.1.113730.3.3.2.15.1:=>vpemail)] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(emailclass:es:=>vpemail)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.15.1.1:=900)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota<=600)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota>=600)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(nsRole~=cn=new)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(uid=*wal*)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(uid=mw*)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(roomNumber=0312)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(l=Cupertino)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(uid=user1)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(objectclass=inetorgperson)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(l=sunnyvale)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(roomNumber=3924)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(l=*)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(objectclass=*)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota<=900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota>=100)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=600)] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=900)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=600)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=600)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=600)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1:=>=100)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:fr:=>=100)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=100)] | 0.24 | |
No log output captured. | |||
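The mailquota and emailclass cases above use RFC 4515 extensible-match syntax, attribute:matchingRule:=value, where the matching rule is named either by OID (the 2.16.840.1.113730.3.3.2.x values in the test IDs) or by a language tag such as fr or es that selects a collation-based ordering rule. A sketch of issuing one of these filters with python-ldap, server URL and bind assumed:

import ldap

conn = ldap.initialize("ldap://localhost:389")          # assumed URL
conn.simple_bind_s("cn=Directory Manager", "password")  # assumed credentials

# Extensible match with the "fr" collation ordering rule: entries whose
# mailquota orders >= 600 under that rule, as in the cases above.
conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
              "(mailquota:fr:=>=600)")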
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole~=cn=new managed))(!(nsRole=cn=new vaddr filtered role,dc=example,dc=com)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*) (nsRole=cn=*another*))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=*wal*))(!(nsRole=cn=*another*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=*wal*))(nsRole=cn=*another*))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(!(nsRole=cn=*another*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(uid=*wal*)(|(nsRole~=cn=new managed) (l=Cupertino)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(&(uid=*wal*) (l=Cupertino))(&(uid=*wal*) (nsRole~=cn=new managed)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(&(nsRole~=cn=new managed) (l=Cupertino)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(|(uid=*wal*) (nsRole~=cn=new managed))(|(uid=*wal*) (l=Cupertino)))] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(nsRole=cn=*vaddr*) (uid=*wal*))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(uid=*wal*) (nsRole=cn=*vaddr*))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(nsRole=cn=*vaddr*) (l=*))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino) (|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino)(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*))))] | 0.21 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(|(uid=*wal*)(&(nsRole~=cn=new managed) (nsRole=cn=*vaddr*)))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(objectclass=inetorgperson))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(objectclass=inetorgperson))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=*vaddr*))(l=sunnyvale))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=*vaddr*))(l=sunnyvale))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user40))(&(!(uid=user1))(!(uid=user20))(!(uid=user30))(objectclass=inetorgperson)))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(roomNumber=2254))(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale)))] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(roomNumber=2254))(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508))(l=sunnyvale)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(& (objectclass=inetorgperson)(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(| (objectclass=inetorgperson)(!(uid=user1))(!(uid=user20))(!(uid=user30)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=sunnyvale)(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=sunnyvale)(!(nsRole=cn=another vaddr role,dc=example,dc=com))(!(roomNumber=3924))(!(roomNumber=4508)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(!(|(!(l=*))(!(l=sunnyvale))))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=*))(!(l=sunnyvale)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino) (emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(!(l=Cupertino))(emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(&(l=Cupertino)(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(!(l=Cupertino))(emailclass=*emai*) (|(nsRole=cn=*vaddr*) (l=*)))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino)(!(emailclass=*emai*))(|(nsRole=cn=*vaddr*) (l=*)))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_positive[(|(l=Cupertino) (emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.11 | |
No log output captured. | |||
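Many of the positive cases above come in De Morgan pairs; for instance (!(|(!(l=*))(!(l=sunnyvale)))) is logically equivalent to (&(l=*)(l=sunnyvale)). Hypothetical string helpers (not part of the suite) make the nesting explicit:

# Compose RFC 4515 boolean filters; by De Morgan, NOT(OR(NOT a, NOT b))
# is equivalent to AND(a, b).
def AND(*fs): return "(&" + "".join(fs) + ")"
def OR(*fs):  return "(|" + "".join(fs) + ")"
def NOT(f):   return "(!" + f + ")"

# Reproduces the (!(|(!(l=*))(!(l=sunnyvale)))) case above.
assert NOT(OR(NOT("(l=*)"), NOT("(l=sunnyvale)"))) == "(!(|(!(l=*))(!(l=sunnyvale))))"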
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(postalCode:de:==77777)] | 0.24 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(fred=*)] | 0.11 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.5:=vpemail)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1:=<1)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:es:=<1)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.1:=1)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1:=<vpemail)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:es:=<vpemail)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(emailclass:2.16.840.1.113730.3.3.2.15.1.1:=vpemail)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.15.1:=<900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:es:=<900)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota<=100)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota>=900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole~=cn=new managed)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=new vaddr filtered role,dc=example,dc=com)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=*another*)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=*vaddr*)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user20)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user30)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(nsRole=cn=another vaddr role,dc=example,dc=com)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(roomNumber=4508)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(uid=user40)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(roomNumber=2254)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=100)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=100)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1.2:=100)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=600)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=600)] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1:=<=900)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:fr:=<=900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(mailquota:2.16.840.1.113730.3.3.2.18.1.4:=900)] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(Description=This is the special \2a attribute value)] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(Description=*\2a*)] | 0.09 | |
No log output captured. | |||
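The two Description cases above search for a literal asterisk, which RFC 4515 requires to be escaped as \2a inside an assertion value (likewise \28, \29, \5c and \00 for "(", ")", "\" and NUL). python-ldap's ldap.filter.escape_filter_chars performs this escaping; the raw value below is taken from the first Description case:

from ldap.filter import escape_filter_chars

assert escape_filter_chars("*") == "\\2a"
# Builds the first Description filter above from its unescaped value.
filt = "(Description=%s)" % escape_filter_chars(
    "This is the special * attribute value")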
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole~=cn=new managed) (nsRole=cn=new vaddr filtered role,dc=example,dc=com))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(!(nsRole~=cn=new managed)) (nsRole=cn=new vaddr filtered role,dc=example,dc=com))] | 0.10 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole~=cn=new managed)(!(nsRole=cn=new vaddr filtered role,dc=example,dc=com)))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(uid=*wal*) (nsRole=cn=*vaddr*))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(nsRole=cn=*vaddr*) (uid=*wal*))] | 0.09 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(!(l=Cupertino))(emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.08 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(l=Cupertino)(!(emailclass=*emai*))(!(|(nsRole=cn=*vaddr*) (l=*))))] | 0.07 | |
No log output captured. | |||
Passed | suites/filter/vfilter_simple_test.py::test_param_negative[(&(l=Cupertino) (emailclass=*emai*)(!(|(nsRole=cn=*vaddr*) (l=*))))] | 1.11 | |
No log output captured. | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_verify_trees | 65.08 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier4
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'supplier4', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:142 Creating replication topology.
INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d3f3787-dfb9-4bc2-a5f9-289327deb8f9 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 5f15870e-ac6d-432f-abfc-93a743def71f / got description=3d3f3787-dfb9-4bc2-a5f9-289327deb8f9)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002
INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect caaefbb0-7e62-4d98-bb62-66edd5a34baf / got description=5f15870e-ac6d-432f-abfc-93a743def71f)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 5bdf43e0-a7db-4f9d-a9e4-688742099287 / got description=caaefbb0-7e62-4d98-bb62-66edd5a34baf)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003
INFO lib389.topologies:topologies.py:156 Joining supplier supplier4 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3 / got description=5bdf43e0-a7db-4f9d-a9e4-688742099287)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 47f6f733-6a30-4cee-8172-b5600c58f95f / got description=5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 47f6f733-6a30-4cee-8172-b5600c58f95f / got description=5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 47f6f733-6a30-4cee-8172-b5600c58f95f / got description=5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 47f6f733-6a30-4cee-8172-b5600c58f95f / got description=5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 47f6f733-6a30-4cee-8172-b5600c58f95f / got description=5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 47f6f733-6a30-4cee-8172-b5600c58f95f / got description=5abaaf1e-a8f4-45d8-aef9-c90e1f9147c3)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier4 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier4 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier4 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier2 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier3 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 70e51b65-3172-4dc9-99e9-42efa5c56bc1 / got description=47f6f733-6a30-4cee-8172-b5600c58f95f)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 9e80765c-0e91-46aa-b914-38db47de8871 / got description=70e51b65-3172-4dc9-99e9-42efa5c56bc1)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 55fa95f6-acaa-4536-af27-cfa9dcbd8b1e / got description=9e80765c-0e91-46aa-b914-38db47de8871)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working | |||
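The setup log above shows lib389 installing four suppliers, joining them into a full replication mesh, and polling until each agreement converges (the Retry/SUCCESS pairs). A condensed sketch of that flow under the lib389 2.x API; the enum name ReplicaRole.SUPPLIER, the topology.ms key naming and the wait_for_replication call are assumptions based on that API, not code from this suite:

from lib389.topologies import create_topology
from lib389.replica import ReplicationManager
from lib389._constants import ReplicaRole, DEFAULT_SUFFIX

# Build a four-supplier mesh like the one installed above.
topology = create_topology({ReplicaRole.SUPPLIER: 4})
m1 = topology.ms["supplier1"]          # assumed key naming
m2 = topology.ms["supplier2"]

# The Retry/SUCCESS lines above come from this style of convergence check:
# a marker value is written on one side and polled for on the other.
repl = ReplicationManager(DEFAULT_SUFFIX)
repl.wait_for_replication(m1, m2)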
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_sync_through_to_all_4_suppliers | 3.43 | |
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1eda1803-ddc4-4725-93b3-280dfafa1670 / got description=55fa95f6-acaa-4536-af27-cfa9dcbd8b1e)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c0f045b6-91c9-4c2b-b784-3bd9250c3586 / got description=1eda1803-ddc4-4725-93b3-280dfafa1670)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 4f2485e7-3cf5-499e-8d29-d3221141f47c / got description=c0f045b6-91c9-4c2b-b784-3bd9250c3586)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_modify_some_data_in_m3 | 8.89 | |
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 349ed713-b259-4005-a567-82471f23d69c / got description=4f2485e7-3cf5-499e-8d29-d3221141f47c)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9eaece84-2537-4533-a21c-02ca6f00e7e9 / got description=349ed713-b259-4005-a567-82471f23d69c)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 70fd34f8-c2af-472c-a950-682d56bed1ea / got description=9eaece84-2537-4533-a21c-02ca6f00e7e9)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_delete_a_few_entries_in_m4 | 5.48 | |
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 393355dc-1719-4748-90a5-77668880f548 / got description=70fd34f8-c2af-472c-a950-682d56bed1ea)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9f3c27c2-4024-4edb-9089-fb1894a68dfe / got description=393355dc-1719-4748-90a5-77668880f548)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9f3c27c2-4024-4edb-9089-fb1894a68dfe / got description=393355dc-1719-4748-90a5-77668880f548)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2e050e54-43ec-4dc7-b711-199492af4eb3 / got description=9f3c27c2-4024-4edb-9089-fb1894a68dfe)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 1c6a11c8-38b9-4934-a7d9-ac00ab052207 / got description=2e050e54-43ec-4dc7-b711-199492af4eb3)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_replicated_multivalued_entries | 1.14 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 043c7f7a-d262-4dc0-83f7-f5a4870f0932 / got description=1c6a11c8-38b9-4934-a7d9-ac00ab052207) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_bad_replication_agreement | 21.65 | |
No log output captured. | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_nsds5replicaenabled_verify | 68.63 | |
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fa1bb740-1dd0-42c9-a464-b95a2eed8423 / got description=043c7f7a-d262-4dc0-83f7-f5a4870f0932)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ef2a4783-6663-4828-9dd8-16f847d8b187 / got description=fa1bb740-1dd0-42c9-a464-b95a2eed8423)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187)
INFO lib389.replica:replica.py:2498 Retry:
Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f17794cb-6323-4e51-9d91-80b461b5ba3e / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fae8c6ea-b0c2-403d-bc89-b52171c98482 / got description=ef2a4783-6663-4828-9dd8-16f847d8b187) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got 
description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2500 FAIL: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c8429576-7051-400c-9977-bb258d913627 / got description=fae8c6ea-b0c2-403d-bc89-b52171c98482) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 59d54f93-c489-4bc1-ae56-70ccf2a83999 / got description=c8429576-7051-400c-9977-bb258d913627) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
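The Retry/FAIL/SUCCESS lines above come from lib389's replication convergence check: the supplier writes a fresh UUID into the description attribute of a replication test entry, and the target is polled until the same value is read back; a FAIL marks a timed-out round that is then retried with a new UUID. A minimal sketch of invoking that check directly, assuming the standard lib389 two-supplier pytest fixture (the names topo_m2, supplier1 and supplier2 come from lib389.topologies, not from this report's tests):

    # Sketch only: drive lib389's replication convergence check by hand.
    from lib389._constants import DEFAULT_SUFFIX
    from lib389.replica import ReplicationManager
    from lib389.topologies import topology_m2 as topo_m2  # pytest fixture

    def test_supplier_convergence(topo_m2):
        s1 = topo_m2.ms["supplier1"]
        s2 = topo_m2.ms["supplier2"]
        repl = ReplicationManager(DEFAULT_SUFFIX)
        # Writes a new UUID into the test entry's description on s1, then
        # polls s2 until the value matches -- emitting Retry/SUCCESS lines
        # like the ones captured above.
        repl.wait_for_replication(s1, s2)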
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_create_an_entry_on_the_supplier | 2.21 | |
No log output captured. | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_bob_acceptance_tests | 5.37 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 071c6346-ca78-46b4-a98c-268a32324b56 / got description=59d54f93-c489-4bc1-ae56-70ccf2a83999) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/fourwaymmr/fourwaymmr_test.py::test_replica_backup_and_restore | 37.41 | |
------------------------------Captured stderr call------------------------------ ldiffile: /tmp/output_file [04/Jun/2021:22:53:01.918053181 -0400] - INFO - slapd_exemode_ldif2db - Backend Instance: userRoot -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 92d50f08-c07a-4d58-b76c-a2301ad4a79b / got description=071c6346-ca78-46b4-a98c-268a32324b56) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e7d5a106-63aa-4703-a741-e1f107f99a6c / got description=92d50f08-c07a-4d58-b76c-a2301ad4a79b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4f88f585-2f25-4a73-a8a5-0915000e4d79 / got description=e7d5a106-63aa-4703-a741-e1f107f99a6c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ab4db7ad-33a1-46a0-9438-c583b3c2f81b / got description=4f88f585-2f25-4a73-a8a5-0915000e4d79) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5b31449e-a036-49ea-a530-4051d0971995 / got description=ab4db7ad-33a1-46a0-9438-c583b3c2f81b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 52c9c55d-b736-467a-b0a8-98acf8bac772 / got description=5b31449e-a036-49ea-a530-4051d0971995) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
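test_replica_backup_and_restore exercises an LDIF export and re-import of a replica; the slapd_exemode_ldif2db line captured above is the offline import step. A rough sketch of such a cycle, assuming lib389's offline db2ldif/ldif2db wrappers and an illustrative /tmp path (not necessarily what the test itself does):

    # Sketch: offline export/import of the userRoot backend with lib389.
    from lib389._constants import DEFAULT_SUFFIX

    def export_and_reimport(inst, ldif_file="/tmp/output_file"):
        inst.stop()  # db2ldif/ldif2db below run in offline (exemode) form
        # Export the suffix, keeping replication metadata (repl_data=True).
        inst.db2ldif(bename="userRoot", suffixes=[DEFAULT_SUFFIX],
                     excludeSuffixes=None, encrypt=False, repl_data=True,
                     outputfile=ldif_file)
        # Re-import the same LDIF into the backend, then restart.
        inst.ldif2db(bename="userRoot", suffixes=None, excludeSuffixes=None,
                     encrypt=False, import_file=ldif_file)
        inst.start()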
Passed | suites/fractional/fractional_test.py::test_fractional_agreements | 80.66 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 064a4222-3081-4959-bfd5-974396ccdb24 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 404a2815-061c-4d9b-b370-2d42df09de72 / got description=064a4222-3081-4959-bfd5-974396ccdb24) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c6f3771a-de8c-4c23-b7cb-10386f49ec5b / got description=404a2815-061c-4d9b-b370-2d42df09de72) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:169 Joining consumer consumer2 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect f9248787-83f3-4b16-ab67-a50695cd49a8 / got description=c6f3771a-de8c-4c23-b7cb-10386f49ec5b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 already exists INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 was created -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 03574ed8-005e-4bb7-b407-fcf1430d212d / got description=f9248787-83f3-4b16-ab67-a50695cd49a8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 929e689b-d887-4427-a5d6-667bc595f2d7 / got description=03574ed8-005e-4bb7-b407-fcf1430d212d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 7e632ec7-b0b6-4ba2-bd8a-3ce5485972a7 / got description=929e689b-d887-4427-a5d6-667bc595f2d7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
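The setup log above shows lib389 building the fractional suite's topology: install two suppliers and two consumers, join the suppliers, join each consumer, then ensure the remaining agreements exist. A condensed sketch of requesting the same shape programmatically (ReplicaRole names assumed from lib389._constants):

    # Sketch: create the 2-supplier / 2-consumer topology the log describes.
    from lib389._constants import ReplicaRole
    from lib389.topologies import create_topology

    topology = create_topology({ReplicaRole.SUPPLIER: 2,
                                ReplicaRole.CONSUMER: 2})
    supplier1 = topology.ms["supplier1"]   # suppliers land in topology.ms
    consumer1 = topology.cs["consumer1"]   # consumers land in topology.cs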
Passed | suites/fractional/fractional_test.py::test_read_only_consumer | 0.40 | |
No log output captured. | |||
Passed | suites/fractional/fractional_test.py::test_read_write_supplier | 3.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d6a46a80-851c-449a-9cbe-6a92cc73b74d / got description=7e632ec7-b0b6-4ba2-bd8a-3ce5485972a7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c9cef351-6f60-443e-8871-aca764c60dbf / got description=d6a46a80-851c-449a-9cbe-6a92cc73b74d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect c080f86f-67de-48ea-a592-f5044d7b7d0e / got description=c9cef351-6f60-443e-8871-aca764c60dbf) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_filtered_attributes | 3.15 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 887c55ae-abf4-4ffb-a575-39b1c1eb827d / got description=c080f86f-67de-48ea-a592-f5044d7b7d0e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 1c3c41b5-8f64-4a62-b486-be1d7e7d2385 / got description=887c55ae-abf4-4ffb-a575-39b1c1eb827d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 5a14b7da-4d7e-40b1-8ff5-ceb6a5ff678e / got description=1c3c41b5-8f64-4a62-b486-be1d7e7d2385) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_fewer_changes_in_single_operation | 9.23 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 98213b90-f63c-48ff-8ef8-e8fc8ddf47c3 / got description=5a14b7da-4d7e-40b1-8ff5-ceb6a5ff678e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 13ba490f-f1d6-4521-bf18-4cd381714ee1 / got description=98213b90-f63c-48ff-8ef8-e8fc8ddf47c3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 01c112b7-34ce-427c-82c8-c5674584526d / got description=13ba490f-f1d6-4521-bf18-4cd381714ee1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ee6a1f76-5143-49ba-afda-f92e86c6a2a0 / got description=01c112b7-34ce-427c-82c8-c5674584526d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4cab27eb-3180-440f-bf5b-0b0a9f88c122 / got description=ee6a1f76-5143-49ba-afda-f92e86c6a2a0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect e7a04235-04ba-4b62-9806-db9a89b96484 / got description=4cab27eb-3180-440f-bf5b-0b0a9f88c122) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6628afa9-d069-4ae1-9331-3017ffd85918 / got description=e7a04235-04ba-4b62-9806-db9a89b96484) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 05883490-d3ff-4094-b3c4-361df799c987 / got description=6628afa9-d069-4ae1-9331-3017ffd85918) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 58fcaf31-5fec-4fcb-936c-011f4f442637 / got description=05883490-d3ff-4094-b3c4-361df799c987) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_newly_added_attribute_nsds5replicatedattributelisttotal | 11.89 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ca30b4e3-bc38-4e69-b818-30feee2c6ca0 / got description=58fcaf31-5fec-4fcb-936c-011f4f442637) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ca30b4e3-bc38-4e69-b818-30feee2c6ca0 / got description=58fcaf31-5fec-4fcb-936c-011f4f442637) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ca30b4e3-bc38-4e69-b818-30feee2c6ca0 / got description=58fcaf31-5fec-4fcb-936c-011f4f442637) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ef13713a-3ce2-4488-8118-73f3bd848be1 / got description=ca30b4e3-bc38-4e69-b818-30feee2c6ca0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect d67d2615-7948-42de-a53b-ca1061aa9e7a / got description=ef13713a-3ce2-4488-8118-73f3bd848be1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/fractional/fractional_test.py::test_attribute_nsds5replicatedattributelisttotal | 28.21 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 43f159d1-33be-4d12-bc3f-503496854e49 / got description=d67d2615-7948-42de-a53b-ca1061aa9e7a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect f4ee26e5-3b35-48ca-a4aa-51625864c7fa / got description=43f159d1-33be-4d12-bc3f-503496854e49) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect f32d859a-fcbe-4473-9a36-010b91d128e5 / got description=f4ee26e5-3b35-48ca-a4aa-51625864c7fa) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
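The nsds5replicatedattributelisttotal tests above configure fractional replication: nsDS5ReplicatedAttributeList filters attributes out of incremental updates, while nsDS5ReplicatedAttributeListTotal does the same for total update (online initialization). A hedged sketch of setting both on an existing agreement with lib389 (the excluded attribute is illustrative, not the test's actual list):

    # Sketch: mark a replication agreement fractional with lib389.
    from lib389._constants import DEFAULT_SUFFIX
    from lib389.replica import Replicas

    def make_fractional(supplier):
        replica = Replicas(supplier).get(DEFAULT_SUFFIX)
        agmt = replica.get_agreements().list()[0]
        # Skip the attribute during incremental replication ...
        agmt.replace('nsDS5ReplicatedAttributeList',
                     '(objectclass=*) $ EXCLUDE telephoneNumber')
        # ... and during total update / online initialization.
        agmt.replace('nsDS5ReplicatedAttributeListTotal',
                     '(objectclass=*) $ EXCLUDE telephoneNumber')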
Passed | suites/fractional/fractional_test.py::test_implicit_replication_of_password_policy | 20.27 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 255ff249-aa27-4082-be91-d7e55ab90af7 / got description=f32d859a-fcbe-4473-9a36-010b91d128e5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 5a6a8cc2-18bc-40f7-b314-e9cefaa0d2ee / got description=255ff249-aa27-4082-be91-d7e55ab90af7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect d99ad218-a52d-4463-9b55-11da53011081 / got description=5a6a8cc2-18bc-40f7-b314-e9cefaa0d2ee) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 076e6b07-6419-48c1-aa37-6824e5aa2425 / got description=d99ad218-a52d-4463-9b55-11da53011081) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 076e6b07-6419-48c1-aa37-6824e5aa2425 / got description=d99ad218-a52d-4463-9b55-11da53011081) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 076e6b07-6419-48c1-aa37-6824e5aa2425 / got description=d99ad218-a52d-4463-9b55-11da53011081) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 076e6b07-6419-48c1-aa37-6824e5aa2425 / got description=d99ad218-a52d-4463-9b55-11da53011081) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect d0da4eb7-67eb-49f1-bed1-3d5678fda299 / got description=076e6b07-6419-48c1-aa37-6824e5aa2425) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: 
Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 2f6d2788-a3b7-41d0-b42a-e1c4b6a2b0ed / got description=d0da4eb7-67eb-49f1-bed1-3d5678fda299) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6c5c44c0-66cc-4454-b1a3-67e19ffab824 / got description=076e6b07-6419-48c1-aa37-6824e5aa2425) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect d4a9ac60-bd98-4f46-b9ee-a15b5dec8a02 / got description=6c5c44c0-66cc-4454-b1a3-67e19ffab824) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 59802f63-c64d-462c-b90d-b7baf90bd19a / got description=d4a9ac60-bd98-4f46-b9ee-a15b5dec8a02) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working | |||
Passed | suites/get_effective_rights/acceptance_test.py::test_group_aci_entry_exists | 9.15 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:30 Adding user testuser -------------------------------Captured log call-------------------------------- INFO tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:57 Adding group group1 INFO tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:70 Add an ACI granting add access to a user matching the groupdn INFO lib389:acceptance_test.py:79 dn: uid=testuser,dc=example,dc=com INFO lib389:acceptance_test.py:81 ######## entryLevelRights: b'vadn' | |||
Passed | suites/get_effective_rights/acceptance_test.py::test_group_aci_template_entry | 1.59 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.get_effective_rights.acceptance_test:acceptance_test.py:105 Add an ACI granting add access to a user matching the userdn INFO lib389:acceptance_test.py:115 dn: cn=template_person_objectclass,dc=example,dc=com INFO lib389:acceptance_test.py:117 ######## entryLevelRights: b'vadn' INFO lib389:acceptance_test.py:120 dn: cn=template_groupofnames_objectclass,dc=example,dc=com INFO lib389:acceptance_test.py:122 ######## entryLevelRights: b'none' | |||
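The entryLevelRights values above (b'vadn' = view, add, delete, rename) are returned by the Get Effective Rights control. A minimal sketch of requesting them over an already-bound connection, assuming python-ldap ships GetEffectiveRightsControl under ldap.controls.simple (the filter and subject DN are illustrative):

    # Sketch: query effective rights for a user via python-ldap.
    import ldap
    from ldap.controls.simple import GetEffectiveRightsControl

    def show_effective_rights(conn, base, subject_dn):
        ctrl = GetEffectiveRightsControl(True,
                                         authzId=b"dn: " + subject_dn.encode())
        msg_id = conn.search_ext(base, ldap.SCOPE_SUBTREE, "(uid=testuser)",
                                 attrlist=["entryLevelRights"],
                                 serverctrls=[ctrl])
        _, rdata, _, _ = conn.result3(msg_id)
        for dn, attrs in rdata:
            print(dn, attrs.get("entryLevelRights"))  # e.g. [b'vadn']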
Passed | suites/gssapi_repl/gssapi_repl_test.py::test_gssapi_repl | 28.91 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6bea5e9a-267a-4f5c-8702-75698ab3274d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 365c3d2d-93ed-4023-84d8-69f7c8045ba9 / got description=6bea5e9a-267a-4f5c-8702-75698ab3274d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_logging_format_should_be_revised | 8.90 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSCLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: config:hr_timestamp INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=config INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 nsslapd-logging-hr-timestamps-enabled changes the log format in directory server from [07/Jun/2017:17:15:58 +1000] to [07/Jun/2017:17:15:58.716117312 +1000] This actually provides a performance improvement. Additionally, this setting will be removed in a future release. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Set nsslapd-logging-hr-timestamps-enabled to on. You can use 'dsconf' to set this attribute. Here is an example: # dsconf slapd-standalone1 config replace nsslapd-logging-hr-timestamps-enabled=on INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSCLE0001", "severity": "LOW", "description": "Different log timestamp format.", "items": [ "cn=config" ], "detail": "nsslapd-logging-hr-timestamps-enabled changes the log format in directory server from\n\n[07/Jun/2017:17:15:58 +1000]\n\nto\n\n[07/Jun/2017:17:15:58.716117312 +1000]\n\nThis actually provides a performance improvement. Additionally, this setting will be\nremoved in a future release.\n", "fix": "Set nsslapd-logging-hr-timestamps-enabled to on.\nYou can use 'dsconf' to set this attribute. Here is an example:\n\n # dsconf slapd-standalone1 config replace nsslapd-logging-hr-timestamps-enabled=on", "check": "config:hr_timestamp" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... 
INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
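The DSCLE0001 report above already embeds its dsconf resolution; the same change can be scripted through lib389's config wrapper, which is a plausible way the fix would look before the second, clean healthcheck run (inst is any connected DirSrv instance; the name is illustrative):

    # Sketch: apply the DSCLE0001 resolution with lib389.
    def enable_hr_timestamps(inst):
        # Equivalent to:
        #   dsconf slapd-standalone1 config replace \
        #       nsslapd-logging-hr-timestamps-enabled=on
        inst.config.replace('nsslapd-logging-hr-timestamps-enabled', 'on')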
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_RI_plugin_is_misconfigured | 0.39 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSRILE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: refint:update_delay INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=referential integrity postoperation,cn=plugins,cn=config INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The referential integrity plugin has an asynchronous processing mode. This is controlled by the update-delay flag. When this value is 0, referential integrity plugin processes these changes inside of the operation that modified the entry - ie these are synchronous. However, when this is > 0, these are performed asynchronously. This leads to only having referint enabled on one supplier in MMR to prevent replication conflicts and loops. Additionally, because these are performed in the background these updates may cause spurious update delays to your server by batching changes rather than smaller updates during sync processing. We advise that you set this value to 0, and enable referint on all suppliers as it provides a more predictable behaviour. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Set referint-update-delay to 0. You can use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 plugin referential-integrity set --update-delay 0 You must restart the Directory Server for this change to take effect. INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSRILE0001", "severity": "LOW", "description": "Referential integrity plugin may be slower.", "items": [ "cn=referential integrity postoperation,cn=plugins,cn=config" ], "detail": "The referential integrity plugin has an asynchronous processing mode.\nThis is controlled by the update-delay flag. 
When this value is 0, referential\nintegrity plugin processes these changes inside of the operation that modified\nthe entry - ie these are synchronous.\n\nHowever, when this is > 0, these are performed asynchronously.\n\nThis leads to only having referint enabled on one supplier in MMR to prevent replication conflicts and loops.\nAdditionally, because these are performed in the background these updates may cause spurious update\ndelays to your server by batching changes rather than smaller updates during sync processing.\n\nWe advise that you set this value to 0, and enable referint on all suppliers as it provides a more predictable behaviour.\n", "fix": "Set referint-update-delay to 0.\n\nYou can use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 plugin referential-integrity set --update-delay 0\n\nYou must restart the Directory Server for this change to take effect.", "check": "refint:update_delay" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
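Likewise, the DSRILE0001 fix (returning referint-update-delay to synchronous mode) can be scripted rather than run through dsconf; a sketch using lib389's plugin wrapper, including the restart the report says is required:

    # Sketch: apply the DSRILE0001 resolution with lib389.
    from lib389.plugins import ReferentialIntegrityPlugin

    def make_referint_synchronous(inst):
        plugin = ReferentialIntegrityPlugin(inst)
        plugin.replace('referint-update-delay', '0')  # 0 = synchronous
        inst.restart()  # the change only takes effect after a restart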
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_RI_plugin_missing_indexes | 0.37 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSRILE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: refint:attr_indexes INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=referential integrity postoperation,cn=plugins,cn=config INFO LogCapture:health.py:52 -- dc=example,dc=com INFO LogCapture:health.py:52 -- member INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The referential integrity plugin is configured to use an attribute (member) that does not have an "equality" index in backend (dc=example,dc=com). Failure to have the proper indexing will lead to unindexed searches which cause high CPU and can significantly slow the server down. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Check the attributes set in "referint-membership-attr" to make sure they have an index defined that has at least the equality "eq" index type. You will need to reindex the database after adding the missing index type. Here is an example using dsconf: # dsconf slapd-standalone1 backend index add --attr=member --reindex --index-type=eq dc=example,dc=com INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSRILE0002", "severity": "HIGH", "description": "Referential integrity plugin configured with unindexed attribute.", "items": [ "cn=referential integrity postoperation,cn=plugins,cn=config", "dc=example,dc=com", "member" ], "detail": "The referential integrity plugin is configured to use an attribute (member)\nthat does not have an \"equality\" index in backend (dc=example,dc=com).\nFailure to have the proper indexing will lead to unindexed searches which\ncause high CPU and can significantly slow the server down.", "fix": "Check the attributes set in \"referint-membership-attr\" to make sure they have\nan index defined that has at least the equality \"eq\" index type. You will\nneed to reindex the database after adding the missing index type. Here is an\nexample using dsconf:\n\n # dsconf slapd-standalone1 backend index add --attr=member --reindex --index-type=eq dc=example,dc=com\n", "check": "refint:attr_indexes" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... 
INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
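Editor's note: the same index fix can be sketched with lib389, assuming the Backend.add_index() helper and a default userRoot backend; treat this as illustrative, not as the canonical procedure (the dsconf command in the report handles the reindex too).

    from lib389 import DirSrv
    from lib389.backend import Backends

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # Ensure "member" has at least an equality (eq) index in userRoot.
    be = Backends(inst).get('userRoot')
    be.add_index('member', ['eq'])

    # A reindex is still required afterwards, e.g. via the dsconf
    # "--reindex" flag shown in the report.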
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_virtual_attr_incorrectly_indexed | 0.30 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSVIRTLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: backends:userroot:virt_attrs INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Virtual Attributes INFO LogCapture:health.py:52 -- dc=example,dc=com INFO LogCapture:health.py:52 -- Class Of Service (COS) INFO LogCapture:health.py:52 -- cosAttribute: postalcode INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 You should not index virtual attributes, as this will break searches that use the attribute in a filter. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Remove the index for this attribute from the backend configuration. Here is an example using 'dsconf' to remove an index: # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSVIRTLE0001", "severity": "HIGH", "description": "Virtual attribute indexed.", "items": [ "Virtual Attributes", "dc=example,dc=com", "Class Of Service (COS)", "cosAttribute: postalcode" ], "detail": "You should not index virtual attributes, as this will break searches that\nuse the attribute in a filter.", "fix": "Remove the index for this attribute from the backend configuration.\nHere is an example using 'dsconf' to remove an index:\n\n # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com", "check": "backends:userroot:virt_attrs" } ] | |||
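Editor's note: a lib389 sketch of the index removal. The index DN is an assumption based on the default userRoot backend layout; adjust it for other backends. The dsconf delete command quoted in the report is the supported route.

    from lib389 import DirSrv
    from lib389.index import Index

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # Index entries live under the backend's cn=index container; this DN
    # assumes the default userRoot backend.
    idx = Index(inst, dn='cn=postalcode,cn=index,cn=userRoot,'
                         'cn=ldbm database,cn=plugins,cn=config')
    idx.delete()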
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_low_disk_space | 0.73 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 2 Issues found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSVIRTLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: backends:userroot:virt_attrs INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Virtual Attributes INFO LogCapture:health.py:52 -- dc=example,dc=com INFO LogCapture:health.py:52 -- Class Of Service (COS) INFO LogCapture:health.py:52 -- cosAttribute: postalcode INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 You should not index virtual attributes, as this will break searches that use the attribute in a filter. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Remove the index for this attribute from the backend configuration. Here is an example using 'dsconf' to remove an index: # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com INFO LogCapture:health.py:45 [2] DS Lint Error: DSDSLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: monitor-disk-space:disk_space INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Server INFO LogCapture:health.py:52 -- cn=config INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The disk partition used by the server (/), either for the database, the configuration files, or the logs is over 90% full. If the partition becomes completely filled, serious problems can occur with the database or the server's stability. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Attempt to free up disk space. Also try removing old rotated logs, or disable any verbose logging levels that might have been set. You might consider enabling the "Disk Monitoring" feature in cn=config to help prevent a disorderly shutdown of the server: nsslapd-disk-monitoring: on You can use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 config replace nsslapd-disk-monitoring=on You must restart the Directory Server for this change to take effect. 
Please see the Administration guide for more information: https://access.redhat.com/documentation/en-us/red_hat_directory_server/10/html/administration_guide/diskmonitoring INFO LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSVIRTLE0001", "severity": "HIGH", "description": "Virtual attribute indexed.", "items": [ "Virtual Attributes", "dc=example,dc=com", "Class Of Service (COS)", "cosAttribute: postalcode" ], "detail": "You should not index virtual attributes, as this will break searches that\nuse the attribute in a filter.", "fix": "Remove the index for this attribute from the backend configuration.\nHere is an example using 'dsconf' to remove an index:\n\n # dsconf slapd-standalone1 backend index delete --attr postalcode dc=example,dc=com", "check": "backends:userroot:virt_attrs" }, { "dsle": "DSDSLE0001", "severity": "HIGH", "description": "Low disk space.", "items": [ "Server", "cn=config" ], "detail": "The disk partition used by the server (/), either for the database, the\nconfiguration files, or the logs is over 90% full. If the partition becomes\ncompletely filled, serious problems can occur with the database or the server's\nstability.", "fix": "Attempt to free up disk space. Also try removing old rotated logs, or disable any\nverbose logging levels that might have been set. You might consider enabling\nthe \"Disk Monitoring\" feature in cn=config to help prevent a disorderly shutdown\nof the server:\n\n nsslapd-disk-monitoring: on\n\nYou can use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 config replace nsslapd-disk-monitoring=on\n\nYou must restart the Directory Server for this change to take effect.\n\nPlease see the Administration guide for more information:\n\n https://access.redhat.com/documentation/en-us/red_hat_directory_server/10/html/administration_guide/diskmonitoring\n", "check": "monitor-disk-space:disk_space" } ] | |||
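Editor's note: enabling disk monitoring, as the report recommends, is a single cn=config change; a minimal lib389 sketch, assuming the standalone1 instance from the report:

    from lib389 import DirSrv

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # Turn on disk monitoring in cn=config; as the report notes,
    # a restart is required for the change to take effect.
    inst.config.set('nsslapd-disk-monitoring', 'on')
    inst.restart()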
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_notes_unindexed_search | 12.55 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSLOGNOTES0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: Medium INFO LogCapture:health.py:49 Check: logs:notes INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Performance INFO LogCapture:health.py:52 -- /var/log/dirsrv/slapd-standalone1/access INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 Found 1 fully unindexed searches in the current access log. Unindexed searches can cause high CPU and slow down the entire server's performance. [1] Unindexed Search - date: . - conn/op: 1/2 - base: dc=example,dc=com - scope: 2 - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(uid=test*)) - etime: 0.384219970 INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Examine the searches that are unindexed, and either properly index the attributes in the filter, increase the nsslapd-idlistscanlimit, or stop using that filter. INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSLOGNOTES0001", "severity": "Medium", "description": "Unindexed Search", "items": [ "Performance", "/var/log/dirsrv/slapd-standalone1/access" ], "detail": "Found 1 fully unindexed searches in the current access log.\nUnindexed searches can cause high CPU and slow down the entire server's performance.\n\n [1] Unindexed Search\n - date: .\n - conn/op: 1/2\n - base: dc=example,dc=com\n - scope: 2\n - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(uid=test*))\n - etime: 0.384219970\n", "fix": "Examine the searches that are unindexed, and either properly index the attributes\nin the filter, increase the nsslapd-idlistscanlimit, or stop using that filter.", "check": "logs:notes" } ] | |||
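Editor's note: the logs:notes check is driven by notes= flags in the access log. A sketch of hunting for them directly, assuming lib389's DirsrvAccessLog helper (which the healthcheck suite itself uses):

    from lib389 import DirSrv
    from lib389.dirsrv_log import DirsrvAccessLog

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # notes=A marks fully unindexed searches; notes=U marks partially
    # unindexed ones.
    for line in DirsrvAccessLog(inst).match('.*notes=A.*'):
        print(line)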
Passed | suites/healthcheck/health_config_test.py::test_healthcheck_notes_unknown_attribute | 12.19 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSLOGNOTES0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: Medium INFO LogCapture:health.py:49 Check: logs:notes INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Possible Performance Impact INFO LogCapture:health.py:52 -- /var/log/dirsrv/slapd-standalone1/access INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 Found 1 searches in the current access log that are using an unknown attribute in the search filter. [1] Invalid Attribute in Filter - date: . - conn/op: 1/2 - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(unknown=test)) INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Stop using these unknown attributes in the filter, or add the schema to the server and make sure it's properly indexed. INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSLOGNOTES0002", "severity": "Medium", "description": "Unknown Attribute In Filter", "items": [ "Possible Performance Impact", "/var/log/dirsrv/slapd-standalone1/access" ], "detail": "Found 1 searches in the current access log that are using an\nunknown attribute in the search filter.\n\n [1] Invalid Attribute in Filter\n - date: .\n - conn/op: 1/2\n - filter: (&(|(objectClass=nsAccount)(objectClass=nsPerson)(objectClass=simpleSecurityObject)(objectClass=organization)(objectClass=person)(objectClass=account)(objectClass=organizationalUnit)(objectClass=netscapeServer)(objectClass=domain)(objectClass=posixAccount)(objectClass=shadowAccount)(objectClass=posixGroup)(objectClass=mailRecipient))(unknown=test))\n", "fix": "Stop using these unknown attributes in the filter, or add the schema\nto the server and make sure it's properly indexed.", "check": "logs:notes" } ] | |||
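Editor's note: to confirm whether a filter attribute is actually defined in schema, something like the following lib389 sketch can help. It assumes the Schema.get_attributetypes() accessor; "unknown" is the attribute from the flagged filter above.

    from lib389 import DirSrv

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # Scan schema attribute types for the name used in the filter.
    attr = 'unknown'
    known = any(attr.lower() in (n.lower() for n in at.names)
                for at in inst.schema.get_attributetypes())
    print(f"{attr} defined in schema: {known}")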
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_replication_replica_not_reachable | 29.11 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b6f71a00-9d5d-4d58-ab3f-76de8b852368 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0021e168-9906-44be-8f89-1235cad017cc / got description=b6f71a00-9d5d-4d58-ab3f-76de8b852368) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4a41c2df-577d-418d-a605-d61cba6379bb / got description=0021e168-9906-44be-8f89-1235cad017cc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0005 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: replication:agmts_status INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Agreement INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The replication agreement (002) under "dc=example,dc=com" is not in synchronization, because the consumer server is not reachable. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Check if the consumer is running, and also check the errors log for more information. INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSREPLLE0005", "severity": "MEDIUM", "description": "Replication consumer not reachable.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (002) under \"dc=example,dc=com\" is not in synchronization,\nbecause the consumer server is not reachable.", "fix": "Check if the consumer is running, and also check the errors log for more information.", "check": "replication:agmts_status" } ] INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a826be40-61f9-4514-b4b8-b1acae27122a / got description=4a41c2df-577d-418d-a605-d61cba6379bb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
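Editor's note: the DSREPLLE0005 state above can also be inspected programmatically. A sketch, assuming lib389's Replicas/Agreements classes and an Agreement.status() helper (hedged: verify these against your lib389 version), for the supplier1 instance and suffix from this report:

    from lib389 import DirSrv
    from lib389.replica import Replicas
    from lib389.agreement import Agreements

    inst = DirSrv()
    inst.local_simple_allocate(serverid='supplier1')
    inst.open()

    # Walk the agreements under the replica and print their status.
    replica = Replicas(inst).get('dc=example,dc=com')
    for agmt in Agreements(inst, basedn=replica.dn).list():
        print(agmt.get_attr_val_utf8('cn'), agmt.status())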
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_changelog_trimming_not_configured | 3.35 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSCLLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:userroot::cl_trimming INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Changelog INFO LogCapture:health.py:52 -- Backends INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The replication changelog does not have any kind of trimming configured. This will lead to the changelog size growing indefinitely. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Configure changelog trimming, preferably by setting the maximum age of a changelog record. Here is an example: # dsconf slapd-supplier1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSCLLE0001", "severity": "LOW", "description": "Changelog trimming not configured.", "items": [ "Replication", "Changelog", "Backends" ], "detail": "The replication changelog does not have any kind of trimming configured. This will\nlead to the changelog size growing indefinitely.", "fix": "Configure changelog trimming, preferably by setting the maximum age of a changelog\nrecord. Here is an example:\n\n # dsconf slapd-supplier1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d", "check": "backends:userroot::cl_trimming" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
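Editor's note: in 389-ds 2.x the changelog lives inside each replicated backend, so trimming can be sketched as a plain attribute change on that entry. The DN below is an assumption based on the default userRoot backend layout; nsslapd-changelogmaxage is the trimming attribute the dsconf command above sets.

    from lib389 import DirSrv
    from lib389._mapped_object import DSLdapObject

    inst = DirSrv()
    inst.local_simple_allocate(serverid='supplier1')
    inst.open()

    # Per-backend changelog entry (assumed DN for userRoot).
    cl = DSLdapObject(inst, dn='cn=changelog,cn=userRoot,'
                               'cn=ldbm database,cn=plugins,cn=config')
    cl.replace('nsslapd-changelogmaxage', '30d')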
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_replication_presence_of_conflict_entries | 3.72 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 908bfa59-2638-4208-abe8-6cebfb9b6dfc / got description=a826be40-61f9-4514-b4b8-b1acae27122a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7a3d9de2-e955-434b-8abd-93ab1416d801 / got description=908bfa59-2638-4208-abe8-6cebfb9b6dfc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1d5027b1-fcb3-4a8c-8255-d7d9307fa3f2 / got description=7a3d9de2-e955-434b-8abd-93ab1416d801) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: replication:conflicts INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Conflict Entries INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 There were 1 conflict entries found under the replication suffix "dc=example,dc=com". INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 While conflict entries are expected to occur in an MMR environment, they should be resolved. In regards to conflict entries there is always the original/counterpart entry that has a normal DN, and then the conflict version of that entry. Technically both entries are valid; you, as the administrator, need to decide which entry you want to keep. First examine/compare both entries to determine which one you want to keep or remove. You can use the CLI tool "dsconf" to resolve the conflict. 
Here is an example: List the conflict entries: # dsconf slapd-supplier1 repl-conflict list dc=example,dc=com Examine conflict entry and its counterpart entry: # dsconf slapd-supplier1 repl-conflict compare <DN of conflict entry> Remove conflict entry and keep only the original/counterpart entry: # dsconf slapd-supplier1 repl-conflict delete <DN of conflict entry> Replace the original/counterpart entry with the conflict entry: # dsconf slapd-supplier1 repl-conflict swap <DN of conflict entry> INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSREPLLE0002", "severity": "LOW", "description": "Replication conflict entries found.", "items": [ "Replication", "Conflict Entries" ], "detail": "There were 1 conflict entries found under the replication suffix \"dc=example,dc=com\".", "fix": "While conflict entries are expected to occur in an MMR environment, they\nshould be resolved. In regards to conflict entries there is always the original/counterpart\nentry that has a normal DN, and then the conflict version of that entry. Technically both\nentries are valid; you, as the administrator, need to decide which entry you want to keep.\nFirst examine/compare both entries to determine which one you want to keep or remove. You\ncan use the CLI tool \"dsconf\" to resolve the conflict. Here is an example:\n\n List the conflict entries:\n\n # dsconf slapd-supplier1 repl-conflict list dc=example,dc=com\n\n Examine conflict entry and its counterpart entry:\n\n # dsconf slapd-supplier1 repl-conflict compare <DN of conflict entry>\n\n Remove conflict entry and keep only the original/counterpart entry:\n\n # dsconf slapd-supplier1 repl-conflict delete <DN of conflict entry>\n\n Replace the original/counterpart entry with the conflict entry:\n\n # dsconf slapd-supplier1 repl-conflict swap <DN of conflict entry>\n", "check": "replication:conflicts" } ] | |||
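Editor's note: conflict entries can also be listed from Python for triage before choosing delete/swap. A sketch, assuming lib389's ConflictEntries class (the same machinery behind "dsconf repl-conflict list"):

    from lib389 import DirSrv
    from lib389.replica import ConflictEntries

    inst = DirSrv()
    inst.local_simple_allocate(serverid='supplier1')
    inst.open()

    # Print the DN of every conflict entry under the replicated suffix.
    for conflict in ConflictEntries(inst, 'dc=example,dc=com').list():
        print(conflict.dn)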
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_non_replicated_suffixes | 0.21 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking backends:changelog:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:changelog:mappingtree ... INFO LogCapture:health.py:99 Checking backends:changelog:search ... INFO LogCapture:health.py:99 Checking backends:changelog:virt_attrs ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 2 Issues found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:changelog:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=changelog INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The backend database has not been initialized yet INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:changelog:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=changelog INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The backend database has not been initialized yet INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== | |||
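Editor's note: for the DSBLE0003 resolution ("import an LDIF file, or create the suffix entry"), the suffix-entry route can be sketched with lib389. This is illustrative only, assuming the Domain helper and using dc=example,dc=com as a stand-in suffix (the flagged backend here is cn=changelog, which is initialized by replication itself).

    from lib389 import DirSrv
    from lib389.idm.domain import Domain

    inst = DirSrv()
    inst.local_simple_allocate(serverid='supplier1')
    inst.open()

    # Creating the suffix root entry initializes an otherwise empty
    # backend; an LDIF import is the other option.
    dom = Domain(inst, 'dc=example,dc=com')
    dom.create(properties={'dc': 'example', 'description': 'suffix root'})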
Passed | suites/healthcheck/health_repl_test.py::test_healthcheck_replication_out_of_sync_broken | 49.39 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a1b21401-0ec3-42d5-9ea0-045eb4b83728 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e487c1c1-2583-4c41-89d7-f33cda04900b / got description=a1b21401-0ec3-42d5-9ea0-045eb4b83728) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 4c21076b-f26c-4d7c-890f-4d6b86d792eb / got description=e487c1c1-2583-4c41-89d7-f33cda04900b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a1484c38-f8a0-44b6-8533-0d65f8af9478 / got description=4c21076b-f26c-4d7c-890f-4d6b86d792eb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 3 Issues found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: replication:agmts_status INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Agreement INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The replication agreement (002) under "dc=example,dc=com" is not in synchronization. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You may need to reinitialize this replication agreement. Please check the errors log for more information. If you do need to reinitialize the agreement you can do so using dsconf. Here is an example: # dsconf slapd-supplier1 repl-agmt init "002" --suffix dc=example,dc=com INFO LogCapture:health.py:45 [2] DS Lint Error: DSREPLLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: replication:agmts_status INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Agreement INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The replication agreement (003) under "dc=example,dc=com" is not in synchronization. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You may need to reinitialize this replication agreement. Please check the errors log for more information. If you do need to reinitialize the agreement you can do so using dsconf. Here is an example: # dsconf slapd-supplier1 repl-agmt init "003" --suffix dc=example,dc=com INFO LogCapture:health.py:45 [3] DS Lint Error: DSCLLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:userroot::cl_trimming INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Changelog INFO LogCapture:health.py:52 -- Backends INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The replication changelog does not have any kind of trimming configured. This will lead to the changelog size growing indefinitely. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Configure changelog trimming, preferably by setting the maximum age of a changelog record. 
Here is an example: # dsconf slapd-supplier1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d INFO LogCapture:health.py:124 ===== End Of Report (3 Issues found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSREPLLE0001", "severity": "HIGH", "description": "Replication agreement not set to be synchronized.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (002) under \"dc=example,dc=com\" is not in synchronization.", "fix": "You may need to reinitialize this replication agreement. Please check the errors\nlog for more information. If you do need to reinitialize the agreement you can do so\nusing dsconf. Here is an example:\n\n # dsconf slapd-supplier1 repl-agmt init \"002\" --suffix dc=example,dc=com", "check": "replication:agmts_status" }, { "dsle": "DSREPLLE0001", "severity": "HIGH", "description": "Replication agreement not set to be synchronized.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (003) under \"dc=example,dc=com\" is not in synchronization.", "fix": "You may need to reinitialize this replication agreement. Please check the errors\nlog for more information. If you do need to reinitialize the agreement you can do so\nusing dsconf. Here is an example:\n\n # dsconf slapd-supplier1 repl-agmt init \"003\" --suffix dc=example,dc=com", "check": "replication:agmts_status" }, { "dsle": "DSCLLE0001", "severity": "LOW", "description": "Changelog trimming not configured.", "items": [ "Replication", "Changelog", "Backends" ], "detail": "The replication changelog does not have any kind of trimming configured. This will\nlead to the changelog size growing indefinitely.", "fix": "Configure changelog trimming, preferably by setting the maximum age of a changelog\nrecord. Here is an example:\n\n # dsconf slapd-supplier1 replication set-changelog --suffix YOUR_SUFFIX --max-age 30d", "check": "backends:userroot::cl_trimming" } ] | |||
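Editor's note: the DSREPLLE0001 reinitialization can also be driven from lib389. A sketch, assuming the begin_reinit()/wait_reinit() Agreement helpers and the supplier1 instance and suffix from this report; picking list()[0] is purely illustrative, select the agreement matching the flagged name in practice.

    from lib389 import DirSrv
    from lib389.replica import Replicas
    from lib389.agreement import Agreements

    inst = DirSrv()
    inst.local_simple_allocate(serverid='supplier1')
    inst.open()

    replica = Replicas(inst).get('dc=example,dc=com')
    agmt = Agreements(inst, basedn=replica.dn).list()[0]

    # Kick off a full reinit of the consumer and wait for it to finish.
    agmt.begin_reinit()
    (done, error) = agmt.wait_reinit()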
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_insecure_pwd_hash_configured | 9.14 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSCLE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: config:passwordscheme INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=config INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 Password storage schemes in Directory Server define how passwords are hashed via a one-way mathematical function for storage. Knowing the hash, it is difficult to gain the input, but knowing the input you can easily compare the hash. Many hashes are well known for cryptographic verification properties, but are designed to be *fast* to validate. This is the opposite of what we desire for password storage. In the unlikely event of a disclosure, you want hashes to be *difficult* to verify, as this adds a cost of work to an attacker. In Directory Server, we offer one hash suitable for this (PBKDF2_SHA256) and one hash for "legacy" support (SSHA512). Your configuration does not use these for password storage or the root password storage scheme. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Perform a configuration reset of the values: passwordStorageScheme nsslapd-rootpwstoragescheme IE, stop Directory Server, and in dse.ldif delete these two lines. When Directory Server is started, they will use the server provided defaults that are secure. You can also use 'dsconf' to replace these values. Here is an example: # dsconf slapd-standalone1 config replace passwordStorageScheme=PBKDF2_SHA256 nsslapd-rootpwstoragescheme=PBKDF2_SHA256 INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSCLE0002", "severity": "HIGH", "description": "Weak passwordStorageScheme.", "items": [ "cn=config" ], "detail": "Password storage schemes in Directory Server define how passwords are hashed via a\none-way mathematical function for storage. Knowing the hash, it is difficult to gain\nthe input, but knowing the input you can easily compare the hash.\n\nMany hashes are well known for cryptographic verification properties, but are\ndesigned to be *fast* to validate. This is the opposite of what we desire for password\nstorage. 
In the unlikely event of a disclosure, you want hashes to be *difficult* to\nverify, as this adds a cost of work to an attacker.\n\nIn Directory Server, we offer one hash suitable for this (PBKDF2_SHA256) and one hash\nfor \"legacy\" support (SSHA512).\n\nYour configuration does not use these for password storage or the root password storage\nscheme.\n", "fix": "Perform a configuration reset of the values:\n\npasswordStorageScheme\nnsslapd-rootpwstoragescheme\n\nIE, stop Directory Server, and in dse.ldif delete these two lines. When Directory Server\nis started, they will use the server provided defaults that are secure.\n\nYou can also use 'dsconf' to replace these values. Here is an example:\n\n # dsconf slapd-standalone1 config replace passwordStorageScheme=PBKDF2_SHA256 nsslapd-rootpwstoragescheme=PBKDF2_SHA256", "check": "config:passwordscheme" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
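Editor's note: the recommended scheme change is two cn=config attribute replacements; a minimal lib389 sketch, assuming the standalone1 instance from the report (equivalent to the dsconf replace command quoted above):

    from lib389 import DirSrv

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # Move both schemes to PBKDF2_SHA256, per the report's recommendation.
    inst.config.replace('passwordStorageScheme', 'PBKDF2_SHA256')
    inst.config.replace('nsslapd-rootpwstoragescheme', 'PBKDF2_SHA256')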
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_min_allowed_tls_version_too_low | 21.68 | |
------------------------------Captured stdout call------------------------------ Setting system policy to LEGACY Note: System-wide crypto policies are applied on application start-up. It is recommended to restart the system for the change of policies to fully take place. -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSELE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: encryption:check_tls_version INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- cn=encryption,cn=config INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 This Directory Server may not be using strong TLS protocol versions. TLS1.0 is known to have a number of issues with the protocol. Please see: https://tools.ietf.org/html/rfc7457 It is advised you set this value to the maximum possible. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 There are two options for setting the TLS minimum version allowed. You can set "sslVersionMin" in "cn=encryption,cn=config" to a version greater than "TLS1.0". You can also use 'dsconf' to set this value. Here is an example: # dsconf slapd-standalone1 security set --tls-protocol-min=TLS1.2 You must restart the Directory Server for this change to take effect. Or, you can set the system wide crypto policy to FUTURE which will use a higher TLS minimum version, but doing this affects the entire system: # update-crypto-policies --set FUTURE INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSELE0001", "severity": "MEDIUM", "description": "Weak TLS protocol version.", "items": [ "cn=encryption,cn=config" ], "detail": "This Directory Server may not be using strong TLS protocol versions. TLS1.0 is known to\nhave a number of issues with the protocol. Please see:\n\nhttps://tools.ietf.org/html/rfc7457\n\nIt is advised you set this value to the maximum possible.", "fix": "There are two options for setting the TLS minimum version allowed. You can\nset \"sslVersionMin\" in \"cn=encryption,cn=config\" to a version greater than \"TLS1.0\".\nYou can also use 'dsconf' to set this value. Here is an example:\n\n # dsconf slapd-standalone1 security set --tls-protocol-min=TLS1.2\n\nYou must restart the Directory Server for this change to take effect.\n\nOr, you can set the system wide crypto policy to FUTURE which will use a higher TLS\nminimum version, but doing this affects the entire system:\n\n # update-crypto-policies --set FUTURE", "check": "encryption:check_tls_version" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... 
INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
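Editor's note: the sslVersionMin change can be sketched via lib389's Encryption object (cn=encryption,cn=config); this assumes the standalone1 instance from the report, and the dsconf security set command quoted above remains the supported interface.

    from lib389 import DirSrv
    from lib389.config import Encryption

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')
    inst.open()

    # Raise the TLS floor; a restart is required, as the report notes.
    Encryption(inst).replace('sslVersionMin', 'TLS1.2')
    inst.restart()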
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_resolvconf_bad_file_perm | 0.87 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSPERMLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: fschecks:file_perms INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- File Permissions INFO LogCapture:health.py:52 -- /etc/resolv.conf INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The file "/etc/resolv.conf" does not have the expected permissions (644). This can cause issues with replication and chaining. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Change the file permissions: # chmod 644 /etc/resolv.conf INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSPERMLE0001", "severity": "MEDIUM", "description": "Incorrect file permissions.", "items": [ "File Permissions", "/etc/resolv.conf" ], "detail": "The file \"/etc/resolv.conf\" does not have the expected permissions (644). This\ncan cause issues with replication and chaining.", "fix": "Change the file permissions:\n\n # chmod 644 /etc/resolv.conf", "check": "fschecks:file_perms" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
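The file-permission comparison behind DSPERMLE0001 can be reproduced with the standard library alone. A small sketch of the same 644 check against /etc/resolv.conf (path and expected mode taken from the report):

    import os
    import stat

    def has_expected_mode(path="/etc/resolv.conf", expected=0o644):
        # Mask off the file-type bits and compare only the permission bits.
        actual = stat.S_IMODE(os.stat(path).st_mode)
        return actual == expected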
Passed | suites/healthcheck/health_security_test.py::test_healthcheck_pwdfile_bad_file_perm | 0.97 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSPERMLE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: fschecks:file_perms INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- File Permissions INFO LogCapture:health.py:52 -- /etc/dirsrv/slapd-standalone1/pwdfile.txt INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The file "/etc/dirsrv/slapd-standalone1/pwdfile.txt" does not have the expected permissions (400). The security database pin/password files should only be readable by Directory Server user. INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Change the file permissions: # chmod 400 /etc/dirsrv/slapd-standalone1/pwdfile.txt INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSPERMLE0002", "severity": "HIGH", "description": "Incorrect security database file permissions.", "items": [ "File Permissions", "/etc/dirsrv/slapd-standalone1/pwdfile.txt" ], "detail": "The file \"/etc/dirsrv/slapd-standalone1/pwdfile.txt\" does not have the expected permissions (400). The\nsecurity database pin/password files should only be readable by Directory Server user.", "fix": "Change the file permissions:\n\n # chmod 400 /etc/dirsrv/slapd-standalone1/pwdfile.txt", "check": "fschecks:file_perms" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
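The DSPERMLE0002 fix is a single chmod. A companion sketch applying the 400 mode from the resolution text; the pin file path is the one reported for this instance and is illustrative:

    import os

    pin_file = "/etc/dirsrv/slapd-standalone1/pwdfile.txt"
    os.chmod(pin_file, 0o400)  # equivalent to: chmod 400 <pin_file>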
Passed | suites/healthcheck/health_sync_test.py::test_healthcheck_replication_out_of_sync_not_broken | 82.01 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0e8d8a33-f0d2-4db7-9775-c246e61f73f8 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9a2e47a5-02a6-40a5-8739-871558731f09 / got description=0e8d8a33-f0d2-4db7-9775-c246e61f73f8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 76781299-65af-4b34-9944-8a996031e4b0 / got description=9a2e47a5-02a6-40a5-8739-871558731f09) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 5561c392-4d32-468a-8e9e-cb059091e94e / got description=76781299-65af-4b34-9944-8a996031e4b0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSREPLLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: replication:agmts_status INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- Replication INFO LogCapture:health.py:52 -- Agreement INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The replication agreement (001) under "dc=example,dc=com" is not in synchronization. Status message: error (1) can't acquire busy replica (unable to acquire replica: the replica is currently being updated by another supplier.) INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Replication is not in synchronization but it may recover. Continue to monitor this agreement. INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSREPLLE0003", "severity": "MEDIUM", "description": "Unsynchronized replication agreement.", "items": [ "Replication", "Agreement" ], "detail": "The replication agreement (001) under \"dc=example,dc=com\" is not in synchronization.\nStatus message: error (1) can't acquire busy replica (unable to acquire replica: the replica is currently being updated by another supplier.)", "fix": "Replication is not in synchronization but it may recover. Continue to\nmonitor this agreement.", "check": "replication:agmts_status" } ] | |||
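The healthcheck also emits its findings as the JSON list shown above. A sketch of post-processing that output, for example to tolerate recoverable MEDIUM findings such as DSREPLLE0003 while treating HIGH ones as fatal; the key names (dsle, severity, description) are the ones in the report:

    import json

    def triage(report_json: str) -> bool:
        issues = json.loads(report_json)
        for issue in issues:
            print(f'{issue["dsle"]} [{issue["severity"]}] {issue["description"]}')
        # Only HIGH findings fail the run; MEDIUM items may self-recover.
        return not any(issue["severity"] == "HIGH" for issue in issues)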
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_disabled_suffix | 8.15 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 2 Issues found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: backends:userroot:mappingtree INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- userroot INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. 
INFO LogCapture:health.py:45 [2] DS Lint Error: DSBLE0002 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: HIGH INFO LogCapture:health.py:49 Check: backends:userroot:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=example,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 27, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\n'}) INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Check the server's error and access logs for more information. INFO LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userroot" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userroot:mappingtree" }, { "dsle": "DSBLE0002", "severity": "HIGH", "description": "Unable to query backend.", "items": [ "dc=example,dc=com" ], "detail": "Unable to query the backend. LDAP error ({'msgtype': 101, 'msgid': 27, 'result': 1, 'desc': 'Operations error', 'ctrls': [], 'info': 'Warning: Operation attempted on a disabled node : dc=example,dc=com\\n'})", "fix": "Check the server's error and access logs for more information.", "check": "backends:userroot:search" } ] | |||
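The DSBLE0001 resolution says the mapping tree can be recreated by hand. A hedged sketch doing that with python-ldap, using the example entry from the report (suffix o=example and backend userRoot are the report's illustration; URI, bind DN, and password are placeholders). Note the RDN really contains an escaped equals sign, cn=o\3Dexample:

    import ldap

    conn = ldap.initialize("ldap://localhost:389")
    conn.simple_bind_s("cn=Directory Manager", "password")
    conn.add_s(
        "cn=o\\3Dexample,cn=mapping tree,cn=config",
        [
            ("objectClass", [b"top", b"extensibleObject", b"nsMappingTree"]),
            ("cn", [b"o=example"]),
            ("nsslapd-backend", [b"userRoot"]),
            ("nsslapd-state", [b"backend"]),
        ],
    )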
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_standalone | 0.50 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_list_checks | 0.34 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:90 config:hr_timestamp INFO LogCapture:health.py:90 config:passwordscheme INFO LogCapture:health.py:90 backends:userroot:cl_trimming INFO LogCapture:health.py:90 backends:userroot:mappingtree INFO LogCapture:health.py:90 backends:userroot:search INFO LogCapture:health.py:90 backends:userroot:virt_attrs INFO LogCapture:health.py:90 encryption:check_tls_version INFO LogCapture:health.py:90 fschecks:file_perms INFO LogCapture:health.py:90 refint:attr_indexes INFO LogCapture:health.py:90 refint:update_delay INFO LogCapture:health.py:90 monitor-disk-space:disk_space INFO LogCapture:health.py:90 replication:agmts_status INFO LogCapture:health.py:90 replication:conflicts INFO LogCapture:health.py:90 dseldif:nsstate INFO LogCapture:health.py:90 tls:certificate_expiration INFO LogCapture:health.py:90 logs:notes | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_list_errors | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:71 DSBLE0001 :: Possibly incorrect mapping tree. INFO LogCapture:health.py:71 DSBLE0002 :: Unable to query backend. INFO LogCapture:health.py:71 DSBLE0003 :: Uninitialized backend database. INFO LogCapture:health.py:71 DSCERTLE0001 :: Certificate about to expire. INFO LogCapture:health.py:71 DSCERTLE0002 :: Certificate expired. INFO LogCapture:health.py:71 DSCLE0001 :: Different log timestamp format. INFO LogCapture:health.py:71 DSCLE0002 :: Weak passwordStorageScheme. INFO LogCapture:health.py:71 DSCLLE0001 :: Changelog trimming not configured. INFO LogCapture:health.py:71 DSDSLE0001 :: Low disk space. INFO LogCapture:health.py:71 DSELE0001 :: Weak TLS protocol version. INFO LogCapture:health.py:71 DSLOGNOTES0001 :: Unindexed Search INFO LogCapture:health.py:71 DSLOGNOTES0002 :: Unknown Attribute In Filter INFO LogCapture:health.py:71 DSPERMLE0001 :: Incorrect file permissions. INFO LogCapture:health.py:71 DSPERMLE0002 :: Incorrect security database file permissions. INFO LogCapture:health.py:71 DSREPLLE0001 :: Replication agreement not set to be synchronized. INFO LogCapture:health.py:71 DSREPLLE0002 :: Replication conflict entries found. INFO LogCapture:health.py:71 DSREPLLE0003 :: Unsynchronized replication agreement. INFO LogCapture:health.py:71 DSREPLLE0004 :: Unable to get replication agreement status. INFO LogCapture:health.py:71 DSREPLLE0005 :: Replication consumer not reachable. INFO LogCapture:health.py:71 DSRILE0001 :: Referential integrity plugin may be slower. INFO LogCapture:health.py:71 DSRILE0002 :: Referential integrity plugin configured with unindexed attribute. INFO LogCapture:health.py:71 DSSKEWLE0001 :: Medium time skew. INFO LogCapture:health.py:71 DSSKEWLE0002 :: Major time skew. INFO LogCapture:health.py:71 DSSKEWLE0003 :: Extensive time skew. INFO LogCapture:health.py:71 DSVIRTLE0001 :: Virtual attribute indexed. | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_check_option | 1.37 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... 
INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_standalone_tls | 9.60 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_replication | 25.46 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9431910e-5fbb-4e5b-9c3c-c267331eef5b / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7517e3cc-91bb-4ccf-afb4-c6a5d0865611 / got description=9431910e-5fbb-4e5b-9c3c-c267331eef5b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... 
INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_replication_tls | 18.71 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_backend_missing_mapping_tree | 1.61 | |
-------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 2 Issues found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSBLE0001 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: MEDIUM INFO LogCapture:health.py:49 Check: backends:userroot:mappingtree INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- userroot INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 This backend may be missing the correct mapping tree references. Mapping Trees allow the directory server to determine which backend an operation is routed to in the abscence of other information. This is extremely important for correct functioning of LDAP ADD for example. A correct Mapping tree for this backend must contain the suffix name, the database name and be a backend type. IE: cn=o3Dexample,cn=mapping tree,cn=config cn: o=example nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 Either you need to create the mapping tree, or you need to repair the related mapping tree. You will need to do this by hand by editing cn=config, or stopping the instance and editing dse.ldif. INFO LogCapture:health.py:45 [2] DS Lint Error: DSBLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:userroot:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=example,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The backend database has not been initialized yet INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. 
INFO LogCapture:health.py:124 ===== End Of Report (2 Issues found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSBLE0001", "severity": "MEDIUM", "description": "Possibly incorrect mapping tree.", "items": [ "userroot" ], "detail": "This backend may be missing the correct mapping tree references. Mapping Trees allow\nthe directory server to determine which backend an operation is routed to in the\nabscence of other information. This is extremely important for correct functioning\nof LDAP ADD for example.\n\nA correct Mapping tree for this backend must contain the suffix name, the database name\nand be a backend type. IE:\n\ncn=o3Dexample,cn=mapping tree,cn=config\ncn: o=example\nnsslapd-backend: userRoot\nnsslapd-state: backend\nobjectClass: top\nobjectClass: extensibleObject\nobjectClass: nsMappingTree\n\n", "fix": "Either you need to create the mapping tree, or you need to repair the related\nmapping tree. You will need to do this by hand by editing cn=config, or stopping\nthe instance and editing dse.ldif.\n", "check": "backends:userroot:mappingtree" }, { "dsle": "DSBLE0003", "severity": "LOW", "description": "Uninitialized backend database.", "items": [ "dc=example,dc=com" ], "detail": "The backend database has not been initialized yet", "fix": "You need to import an LDIF file, or create the suffix entry, in order to initialize the database.", "check": "backends:userroot:search" } ] INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:111 No issues found. INFO LogCapture:health.py:113 [] | |||
Passed | suites/healthcheck/healthcheck_test.py::test_healthcheck_database_not_initialized | 14.97 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1'} was created. -------------------------------Captured log call-------------------------------- INFO LogCapture:health.py:94 Beginning lint report, this could take a while ... INFO LogCapture:health.py:99 Checking config:hr_timestamp ... INFO LogCapture:health.py:99 Checking config:passwordscheme ... INFO LogCapture:health.py:99 Checking backends:userroot:cl_trimming ... INFO LogCapture:health.py:99 Checking backends:userroot:mappingtree ... INFO LogCapture:health.py:99 Checking backends:userroot:search ... INFO LogCapture:health.py:99 Checking backends:userroot:virt_attrs ... INFO LogCapture:health.py:99 Checking encryption:check_tls_version ... INFO LogCapture:health.py:99 Checking fschecks:file_perms ... INFO LogCapture:health.py:99 Checking refint:attr_indexes ... INFO LogCapture:health.py:99 Checking refint:update_delay ... INFO LogCapture:health.py:99 Checking monitor-disk-space:disk_space ... INFO LogCapture:health.py:99 Checking replication:agmts_status ... INFO LogCapture:health.py:99 Checking replication:conflicts ... INFO LogCapture:health.py:99 Checking dseldif:nsstate ... INFO LogCapture:health.py:99 Checking tls:certificate_expiration ... INFO LogCapture:health.py:99 Checking logs:notes ... INFO LogCapture:health.py:106 Healthcheck complete. INFO LogCapture:health.py:119 1 Issue found! Generating report ... INFO LogCapture:health.py:45 [1] DS Lint Error: DSBLE0003 INFO LogCapture:health.py:46 -------------------------------------------------------------------------------- INFO LogCapture:health.py:47 Severity: LOW INFO LogCapture:health.py:49 Check: backends:userroot:search INFO LogCapture:health.py:50 Affects: INFO LogCapture:health.py:52 -- dc=example,dc=com INFO LogCapture:health.py:53 Details: INFO LogCapture:health.py:54 ----------- INFO LogCapture:health.py:55 The backend database has not been initialized yet INFO LogCapture:health.py:56 Resolution: INFO LogCapture:health.py:57 ----------- INFO LogCapture:health.py:58 You need to import an LDIF file, or create the suffix entry, in order to initialize the database. INFO LogCapture:health.py:124 ===== End Of Report (1 Issue found) ===== INFO LogCapture:health.py:126 [ { "dsle": "DSBLE0003", "severity": "LOW", "description": "Uninitialized backend database.", "items": [ "dc=example,dc=com" ], "detail": "The backend database has not been initialized yet", "fix": "You need to import an LDIF file, or create the suffix entry, in order to initialize the database.", "check": "backends:userroot:search" } ] | |||
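Per the DSBLE0003 resolution, creating the suffix entry is enough to initialize the backend. A minimal python-ldap sketch for the dc=example,dc=com suffix used throughout this report; port, bind DN, and password are placeholders:

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")
    conn.simple_bind_s("cn=Directory Manager", "password")
    conn.add_s(
        "dc=example,dc=com",
        [
            ("objectClass", [b"top", b"domain"]),
            ("dc", [b"example"]),
        ],
    )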
Passed | suites/import/import_test.py::test_import_with_index | 16.62 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/import/import_test.py::test_online_import_with_warning | 5.97 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_crash_on_ldif2db | 6.28 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_ldif2db_allows_entries_without_a_parent_to_be_imported | 5.37 | |
-------------------------------Captured log call-------------------------------- ERROR lib389:__init__.py:2644 ldif2db: Can't find file: /var/lib/dirsrv/slapd-standalone1/ldif/bogus.ldif | |||
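The ERROR line above is lib389's offline import helper rejecting a nonexistent LDIF path, which is exactly what this test exercises. A sketch of the call pattern, assuming a connected DirSrv object named standalone and that DirSrv.ldif2db returns False on failure (the instance must be stopped for an offline import):

    standalone.stop()
    assert not standalone.ldif2db(
        bename=None,
        suffixes=["dc=example,dc=com"],
        excludeSuffixes=None,
        encrypt=False,
        import_file="/var/lib/dirsrv/slapd-standalone1/ldif/bogus.ldif",
    )
    standalone.start()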
Passed | suites/import/import_test.py::test_ldif2db_syntax_check | 6.60 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_issue_a_warning_if_the_cache_size_is_smaller | 7.27 | |
No log output captured. | |||
Passed | suites/import/import_test.py::test_fast_slow_import | 60.85 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.import.import_test:import_test.py:402 total_time1 = 2.357277 INFO tests.suites.import.import_test:import_test.py:403 total_time2 = 2.383685 INFO tests.suites.import.import_test:import_test.py:421 total_time1 = 2.565248 INFO tests.suites.import.import_test:import_test.py:422 total_time2 = 2.926467 |
Passed | suites/import/import_test.py::test_entry_with_escaped_characters_fails_to_import_and_index | 13.29 | |
------------------------------Captured stderr call------------------------------ [04/Jun/2021:23:04:22.759620503 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [04/Jun/2021:23:04:22.767163238 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7486660608, process usage 27062272 [04/Jun/2021:23:04:22.769958231 -0400] - INFO - check_and_set_import_cache - Import allocates 2924476KB import cache. [04/Jun/2021:23:04:22.777083394 -0400] - INFO - bdb_copy_directory - Backing up file 0 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/mail.db) [04/Jun/2021:23:04:22.780164277 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/mail.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/mail.db [04/Jun/2021:23:04:22.782947294 -0400] - INFO - bdb_copy_directory - Backing up file 1 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/id2entry.db) [04/Jun/2021:23:04:22.785462727 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/id2entry.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/id2entry.db [04/Jun/2021:23:04:22.788269514 -0400] - INFO - bdb_copy_directory - Backing up file 2 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/givenName.db) [04/Jun/2021:23:04:22.791121459 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/givenName.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/givenName.db [04/Jun/2021:23:04:22.793796819 -0400] - INFO - bdb_copy_directory - Backing up file 3 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/parentid.db) [04/Jun/2021:23:04:22.797790246 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/parentid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/parentid.db [04/Jun/2021:23:04:22.801664008 -0400] - INFO - bdb_copy_directory - Backing up file 4 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/ancestorid.db) [04/Jun/2021:23:04:22.804467480 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/ancestorid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/ancestorid.db [04/Jun/2021:23:04:22.807434073 -0400] - INFO - bdb_copy_directory - Backing up file 5 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/roomNumber.db) [04/Jun/2021:23:04:22.810479826 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/roomNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/roomNumber.db [04/Jun/2021:23:04:22.813558879 -0400] - INFO - bdb_copy_directory - Backing up file 6 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/entryrdn.db) [04/Jun/2021:23:04:22.820366625 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/entryrdn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/entryrdn.db [04/Jun/2021:23:04:22.823274979 -0400] - INFO - bdb_copy_directory - Backing up file 7 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/sn.db) 
[04/Jun/2021:23:04:22.826020494 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/sn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/sn.db [04/Jun/2021:23:04:22.829013434 -0400] - INFO - bdb_copy_directory - Backing up file 8 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/objectclass.db) [04/Jun/2021:23:04:22.832172587 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/objectclass.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/objectclass.db [04/Jun/2021:23:04:22.835279292 -0400] - INFO - bdb_copy_directory - Backing up file 9 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/DBVERSION) [04/Jun/2021:23:04:22.838681366 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/DBVERSION [04/Jun/2021:23:04:22.841286998 -0400] - INFO - bdb_copy_directory - Backing up file 10 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/uid.db) [04/Jun/2021:23:04:22.843588651 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/uid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/uid.db [04/Jun/2021:23:04:22.846336870 -0400] - INFO - bdb_copy_directory - Backing up file 11 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/aci.db) [04/Jun/2021:23:04:22.849078858 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/aci.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/aci.db [04/Jun/2021:23:04:22.852271430 -0400] - INFO - bdb_copy_directory - Backing up file 12 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/nsuniqueid.db) [04/Jun/2021:23:04:22.854539362 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/nsuniqueid.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/nsuniqueid.db [04/Jun/2021:23:04:22.857263399 -0400] - INFO - bdb_copy_directory - Backing up file 13 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/cn.db) [04/Jun/2021:23:04:22.859839909 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/cn.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/cn.db [04/Jun/2021:23:04:22.862464355 -0400] - INFO - bdb_copy_directory - Backing up file 14 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/numsubordinates.db) [04/Jun/2021:23:04:22.864998935 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/numsubordinates.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/numsubordinates.db [04/Jun/2021:23:04:22.868064466 -0400] - INFO - bdb_copy_directory - Backing up file 15 (/var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/telephoneNumber.db) [04/Jun/2021:23:04:22.870771709 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/userRoot/telephoneNumber.db to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/userRoot/telephoneNumber.db 
[04/Jun/2021:23:04:22.873971877 -0400] - INFO - upgradedb_core - userRoot: Start upgradedb. [04/Jun/2021:23:04:22.876774465 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7487008768, process usage 27934720 [04/Jun/2021:23:04:22.879352995 -0400] - INFO - check_and_set_import_cache - Import allocates 2924612KB import cache. [04/Jun/2021:23:04:23.346996098 -0400] - INFO - bdb_import_main - reindex userRoot: Index buffering enabled with bucket size 100 [04/Jun/2021:23:04:24.052335416 -0400] - INFO - import_monitor_threads - reindex userRoot: Workers finished; cleaning up... [04/Jun/2021:23:04:24.255850253 -0400] - INFO - import_monitor_threads - reindex userRoot: Workers cleaned up. [04/Jun/2021:23:04:24.259837631 -0400] - INFO - bdb_import_main - reindex userRoot: Cleaning up producer thread... [04/Jun/2021:23:04:24.262552242 -0400] - INFO - bdb_import_main - reindex userRoot: Indexing complete. Post-processing... [04/Jun/2021:23:04:24.265207277 -0400] - INFO - bdb_import_main - reindex userRoot: Generating numsubordinates (this may take several minutes to complete)... [04/Jun/2021:23:04:24.267874761 -0400] - INFO - bdb_import_main - reindex userRoot: Generating numSubordinates complete. [04/Jun/2021:23:04:24.270335309 -0400] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Gathering ancestorid non-leaf IDs... [04/Jun/2021:23:04:24.272857095 -0400] - INFO - bdb_get_nonleaf_ids - reindex userRoot: Finished gathering ancestorid non-leaf IDs. [04/Jun/2021:23:04:24.275500849 -0400] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Starting sort of ancestorid non-leaf IDs... [04/Jun/2021:23:04:24.277968290 -0400] - INFO - ldbm_get_nonleaf_ids - reindex userRoot: Finished sort of ancestorid non-leaf IDs. [04/Jun/2021:23:04:24.283414725 -0400] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Creating ancestorid index (new idl)... [04/Jun/2021:23:04:24.286000578 -0400] - INFO - bdb_ancestorid_new_idl_create_index - reindex userRoot: Created ancestorid index (new idl). [04/Jun/2021:23:04:24.288439238 -0400] - INFO - bdb_import_main - reindex userRoot: Flushing caches... [04/Jun/2021:23:04:24.290922468 -0400] - INFO - bdb_import_main - reindex userRoot: Closing files... [04/Jun/2021:23:04:24.393654610 -0400] - INFO - bdb_import_main - reindex userRoot: Reindexing complete. Processed 15 entries in 1 seconds. 
(15.00 entries/sec) [04/Jun/2021:23:04:24.397323063 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000003 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000003 [04/Jun/2021:23:04:24.412818393 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000005 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000005 [04/Jun/2021:23:04:24.426854515 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000004 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000004 [04/Jun/2021:23:04:24.442111996 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000001 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000001 [04/Jun/2021:23:04:24.461956788 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000002 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000002 [04/Jun/2021:23:04:24.477328167 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000007 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000007 [04/Jun/2021:23:04:24.489191754 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/log.0000000006 to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/log.0000000006 [04/Jun/2021:23:04:24.500687389 -0400] - INFO - dblayer_copyfile - Copying /var/lib/dirsrv/slapd-standalone1/db/DBVERSION to /var/lib/dirsrv/slapd-standalone1/bak/reindex_2021-06-04T23:04:22.632788/DBVERSION [04/Jun/2021:23:04:24.503691154 -0400] - INFO - bdb_pre_close - All database threads now stopped | |||
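The blocks above are the server-side trace of an offline reindex/upgradedb pass: the userRoot databases are first copied under bak/reindex_<timestamp>, then every index is rebuilt and the ancestorid index regenerated. Tests more commonly drive reindexing online through the cn=tasks API; a minimal sketch follows (the reindex() keyword names are quoted from memory and may differ slightly between lib389 versions):

# Sketch: online reindex of one attribute, using the standard pytest fixture.
from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389._constants import DEFAULT_SUFFIX
from lib389.properties import TASK_WAIT

def test_reindex_uid(topology_st):
    inst = topology_st.standalone
    # Creates a cn=index task entry under cn=tasks,cn=config and blocks
    # until the server reports completion (producing logs like the above).
    inst.tasks.reindex(suffix=DEFAULT_SUFFIX,
                       attrname='uid',
                       args={TASK_WAIT: True})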
Passed | suites/import/import_test.py::test_import_perf_after_failure | 24.76 | |
No log output captured. | |||
Passed | suites/import/regression_test.py::test_replay_import_operation | 39.18 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.import.regression_test:regression_test.py:94 Exporting LDIF online... INFO tests.suites.import.regression_test:regression_test.py:53 Run. INFO tests.suites.import.regression_test:regression_test.py:104 Importing LDIF online, should raise operation error. INFO tests.suites.import.regression_test:regression_test.py:115 Looping. Tried 1 times so far. INFO tests.suites.import.regression_test:regression_test.py:115 Looping. Tried 2 times so far. INFO tests.suites.import.regression_test:regression_test.py:115 Looping. Tried 3 times so far. INFO tests.suites.import.regression_test:regression_test.py:57 Adding users. INFO tests.suites.import.regression_test:regression_test.py:119 Importing LDIF online | |||
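test_replay_import_operation exercises the online export/import task pair: it exports the suffix, starts an import of the same file, and retries until the competing import completes. A compact sketch of the two task calls it is built around (the file name is illustrative, not the suite's actual value):

from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389._constants import DEFAULT_SUFFIX
from lib389.properties import TASK_WAIT

def test_export_then_import(topology_st):
    inst = topology_st.standalone
    ldif = inst.get_ldif_dir() + '/replay.ldif'   # instance-local scratch dir
    # Online export: cn=export task, wait for completion.
    inst.tasks.exportLDIF(suffix=DEFAULT_SUFFIX, output_file=ldif,
                          args={TASK_WAIT: True})
    # Online import of the file we just produced.
    inst.tasks.importLDIF(suffix=DEFAULT_SUFFIX, input_file=ldif,
                          args={TASK_WAIT: True})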
Passed | suites/import/regression_test.py::test_import_be_default | 12.46 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.import.regression_test:regression_test.py:143 Adding suffix:dc=default,dc=com and backend: default... INFO tests.suites.import.regression_test:regression_test.py:149 Create LDIF file and import it... INFO tests.suites.import.regression_test:regression_test.py:154 Stopping the server and running offline import... INFO tests.suites.import.regression_test:regression_test.py:160 Verifying entry count after import... INFO tests.suites.import.regression_test:regression_test.py:166 Test PASSED | |||
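test_import_be_default is the offline variant: it creates a second backend, stops the server, and runs ldif2db directly. A sketch of that flow under the usual lib389 Backends.create() pattern (suffix and backend names are illustrative, and the LDIF is assumed to exist already):

from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389.backend import Backends

def test_offline_import(topology_st):
    inst = topology_st.standalone
    Backends(inst).create(properties={'cn': 'default',
                                      'nsslapd-suffix': 'dc=default,dc=com'})
    ldif = inst.get_ldif_dir() + '/default.ldif'  # assume an LDIF was written here
    inst.stop()                                   # ldif2db only runs offline
    assert inst.ldif2db('default', None, None, None, ldif)
    inst.start()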
Passed | suites/import/regression_test.py::test_del_suffix_import | 6.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.import.regression_test:regression_test.py:183 Adding suffix:dc=importest1,dc=com and backend: importest1 INFO tests.suites.import.regression_test:regression_test.py:188 Create LDIF file and import it INFO tests.suites.import.regression_test:regression_test.py:194 Stopping the server and running offline import INFO tests.suites.import.regression_test:regression_test.py:199 Deleting suffix-dc=importest2,dc=com INFO tests.suites.import.regression_test:regression_test.py:202 Adding the same database-importest1 after deleting it | |||
Passed | suites/import/regression_test.py::test_del_suffix_backend | 7.84 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.import.regression_test:regression_test.py:221 Adding suffix:dc=importest2,dc=com and backend: importest2 INFO tests.suites.import.regression_test:regression_test.py:226 Create LDIF file and import it INFO lib389:tasks.py:525 Import task import_06042021_230551 for file /var/lib/dirsrv/slapd-standalone1/ldif/suffix_del2.ldif completed successfully INFO tests.suites.import.regression_test:regression_test.py:234 Deleting suffix-dc=importest2,dc=com INFO tests.suites.import.regression_test:regression_test.py:237 Adding the same database-importest2 after deleting it INFO tests.suites.import.regression_test:regression_test.py:240 Checking if server can be restarted after re-adding the same database | |||
Passed | suites/import/regression_test.py::test_import_duplicate_dn | 15.15 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.import.regression_test:regression_test.py:266 Delete the previous error logs INFO tests.suites.import.regression_test:regression_test.py:269 Create import file INFO tests.suites.import.regression_test:regression_test.py:293 Import ldif with duplicate entry ERROR lib389:tasks.py:522 Error: import task import_06042021_230601 for file /var/lib/dirsrv/slapd-standalone1/ldif/data.ldif exited with -23 INFO tests.suites.import.regression_test:regression_test.py:296 Restart the server to flush the logs INFO tests.suites.import.regression_test:regression_test.py:299 Error log should not have "unable to flush" message INFO tests.suites.import.regression_test:regression_test.py:302 Error log should have "Duplicated DN detected" message | |||
Passed | suites/import/regression_test.py::test_large_ldif2db_ancestorid_index_creation | 623.78 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.import.regression_test:regression_test.py:355 Delete the previous errors logs INFO tests.suites.import.regression_test:regression_test.py:358 Add suffix:o=test and backend: test... INFO tests.suites.import.regression_test:regression_test.py:371 Create a large nested ldif file using dbgen : /var/lib/dirsrv/slapd-standalone1/ldif/large_nested.ldif INFO tests.suites.import.regression_test:regression_test.py:374 Stop the server and run offline import... INFO tests.suites.import.regression_test:regression_test.py:379 Starting the server INFO tests.suites.import.regression_test:regression_test.py:382 parse the errors logs to check lines with "Starting sort of ancestorid" are present INFO tests.suites.import.regression_test:regression_test.py:386 parse the errors logs to check lines with "Finished sort of ancestorid" are present INFO tests.suites.import.regression_test:regression_test.py:390 parse the error logs for the line with "Gathering ancestorid non-leaf IDs" INFO tests.suites.import.regression_test:regression_test.py:394 parse the error logs for the line with "Created ancestorid index" INFO tests.suites.import.regression_test:regression_test.py:398 get the ancestorid non-leaf IDs indexing start and end time from the collected strings INFO tests.suites.import.regression_test:regression_test.py:404 Calculate the elapsed time for the ancestorid non-leaf IDs index creation | |||
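The 623-second test above gets its signal by grepping the server error log for the ancestorid phases and timing the gap between them. A self-contained sketch of that log arithmetic; the timestamp layout is the bracketed prefix visible in the reindex output earlier on this page (nanosecond precision, which Python's %f cannot parse, hence the trim):

from datetime import datetime

def parse_ds_timestamp(line):
    # Lines look like: [04/Jun/2021:23:04:24.270335309 -0400] - INFO - ...
    stamp = line[1:line.index(']')]
    head, tz = stamp.rsplit(' ', 1)
    head = head[:-3]                 # nanoseconds -> microseconds for %f
    return datetime.strptime(head + ' ' + tz, '%d/%b/%Y:%H:%M:%S.%f %z')

def ancestorid_elapsed(inst):
    # ds_error_log.match() greps the instance error log for a regex.
    start = inst.ds_error_log.match(r'.*Gathering ancestorid non-leaf IDs.*')[0]
    end = inst.ds_error_log.match(r'.*Created ancestorid index.*')[0]
    return (parse_ds_timestamp(end) - parse_ds_timestamp(start)).total_seconds()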
Passed | suites/indexes/regression_test.py::test_reindex_task_creates_abandoned_index_file | 20.99 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:837 Index task index_all_06042021_231646 completed successfully INFO lib389:tasks.py:837 Index task index_all_06042021_231649 completed successfully INFO lib389:tasks.py:837 Index task index_all_06042021_231655 completed successfully | |||
Passed | suites/indexes/regression_test.py::test_unindexed_internal_search_crashes_server | 20.51 | |
No log output captured. | |||
Passed | suites/ldapi/ldapi_test.py::test_ldapi_authdn_attr_rewrite | 28.13 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ # extended LDIF # # LDAPv3 # base <> with scope baseObject # filter: (objectclass=*) # requesting: ALL # # dn: objectClass: top defaultnamingcontext: dc=example,dc=com dataversion: 020210605031731 netscapemdsuffix: cn=ldap://dc=localhost,dc=localdomain:38901 # search result search: 2 result: 0 Success # numResponses: 2 # numEntries: 1 # extended LDIF # # LDAPv3 # base <> with scope baseObject # filter: (objectclass=*) # requesting: ALL # # dn: objectClass: top defaultnamingcontext: dc=example,dc=com dataversion: 020210605031731 netscapemdsuffix: cn=ldap://dc=localhost,dc=localdomain:38901 # search result search: 2 result: 0 Success # numResponses: 2 # numEntries: 1 # extended LDIF # # LDAPv3 # base <> with scope baseObject # filter: (objectclass=*) # requesting: ALL # # dn: objectClass: top defaultnamingcontext: dc=example,dc=com dataversion: 020210605031734 netscapemdsuffix: cn=ldap://dc=localhost,dc=localdomain:38901 # search result search: 2 result: 0 Success # numResponses: 2 # numEntries: 1 # extended LDIF # # LDAPv3 # base <> with scope baseObject # filter: (objectclass=*) # requesting: ALL # # dn: objectClass: top defaultnamingcontext: dc=example,dc=com dataversion: 020210605031734 netscapemdsuffix: cn=ldap://dc=localhost,dc=localdomain:38901 # search result search: 2 result: 0 Success # numResponses: 2 # numEntries: 1 ------------------------------Captured stderr call------------------------------ SASL/EXTERNAL authentication started SASL username: gidNumber=5001+uidNumber=5001,cn=peercred,cn=external,cn=auth SASL SSF: 0 SASL/EXTERNAL authentication started SASL username: gidNumber=0+uidNumber=0,cn=peercred,cn=external,cn=auth SASL SSF: 0 SASL/EXTERNAL authentication started SASL username: gidNumber=5002+uidNumber=5002,cn=peercred,cn=external,cn=auth SASL SSF: 0 SASL/EXTERNAL authentication started SASL username: gidNumber=5003+uidNumber=5003,cn=peercred,cn=external,cn=auth SASL SSF: 0 | |||
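The SASL/EXTERNAL stderr above is the heart of the LDAPI authdn-rewrite test: each connecting uid/gid pair is mapped to a different authorization DN. The same bind can be reproduced from python-ldap; the socket path below is the conventional lib389 location and is an assumption, not taken from the test:

import urllib.parse
import ldap

# LDAPI: the filesystem socket path is percent-encoded into the URI.
socket_path = '/var/run/slapd-standalone1.socket'   # assumed default path
conn = ldap.initialize('ldapi://' + urllib.parse.quote(socket_path, safe=''))
# SASL/EXTERNAL over LDAPI authenticates with the client's uid/gid
# (the gidNumber=...+uidNumber=...,cn=peercred,... identities logged above).
conn.sasl_external_bind_s()
print(conn.whoami_s())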
Passed | suites/lib389/config_compare_test.py::test_config_compare | 18.66 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_mul_explicit_rdn | 7.95 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_mul_derive_single_dn | 0.08 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_mul_derive_mult_dn | 0.25 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_explicit_dn | 0.32 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_explicit_rdn | 0.07 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_derive_single_dn | 0.08 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_derive_mult_dn | 0.10 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_invalid_no_basedn | 0.07 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_invalid_no_rdn | 0.07 | |
No log output captured. | |||
Passed | suites/lib389/dsldapobject/dn_construct_test.py::test_sin_non_present_rdn | 1.27 | |
No log output captured. | |||
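The dn_construct cases pin down how lib389 resolves an entry DN on create(): an explicit dn wins, otherwise the DN is derived from the RDN attribute plus the collection's basedn, and ambiguous or missing inputs must fail. A sketch of the two happy paths using groups (values illustrative; ou=Groups is the collection's default container):

from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389.idm.group import Group, Groups
from lib389._constants import DEFAULT_SUFFIX

def test_dn_resolution(topology_st):
    inst = topology_st.standalone
    groups = Groups(inst, DEFAULT_SUFFIX)
    # Derived DN: rdn attribute + the collection's basedn.
    g1 = groups.create(properties={'cn': 'g1'})
    assert g1.dn == 'cn=g1,ou=Groups,%s' % DEFAULT_SUFFIX
    # Explicit DN: construct the object at a known location, then create it.
    g2 = Group(inst, dn='cn=g2,ou=Groups,%s' % DEFAULT_SUFFIX)
    g2.create(properties={'cn': 'g2'})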
Passed | suites/lib389/idm/account_test.py::test_account_delete | 10.28 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/idm/user_compare_i2_test.py::test_user_compare_i2 | 21.21 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/lib389/idm/user_compare_m2Repl_test.py::test_user_compare_m2Repl | 28.69 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 67856538-c785-4e56-bc74-23a708481b98 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 852878f1-fbc0-4c13-b426-4a86e89dc3fc / got description=67856538-c785-4e56-bc74-23a708481b98) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7193eb5a-ee50-4053-9cbf-53600a85c359 / got description=852878f1-fbc0-4c13-b426-4a86e89dc3fc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
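The setup log above is lib389's ReplicationManager bootstrapping two suppliers and then proving convergence by writing a marker value on one side and polling for it on the other (the "expect <uuid> / got description=..." retries). With the topology_m2 fixture already in place, the verification step reduces to:

from lib389.topologies import topology_m2  # noqa: F401  (pytest fixture)
from lib389.replica import ReplicationManager
from lib389._constants import DEFAULT_SUFFIX

def test_both_directions(topology_m2):
    m1 = topology_m2.ms['supplier1']
    m2 = topology_m2.ms['supplier2']
    repl = ReplicationManager(DEFAULT_SUFFIX)
    repl.test_replication(m1, m2)   # marker write on m1, poll on m2
    repl.test_replication(m2, m1)   # and the reverse direction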
Passed | suites/lib389/idm/user_compare_st_test.py::test_user_compare | 10.45 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logexpirationtime-invalid_vals0-valid_vals0] | 8.71 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[maxlogsize-invalid_vals1-valid_vals1] | 0.18 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logmaxdiskspace-invalid_vals2-valid_vals2] | 0.15 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logminfreediskspace-invalid_vals3-valid_vals3] | 0.14 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[mode-invalid_vals4-valid_vals4] | 0.19 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[maxlogsperdir-invalid_vals5-valid_vals5] | 0.17 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logrotationsynchour-invalid_vals6-valid_vals6] | 0.14 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logrotationsyncmin-invalid_vals7-valid_vals7] | 0.13 | |
No log output captured. | |||
Passed | suites/logging/logging_config_test.py::test_logging_digit_config[logrotationtime-invalid_vals8-valid_vals8] | 2.90 | |
No log output captured. | |||
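The nine rows above are one parametrized test: for each log rotation/expiration attribute the server must reject out-of-range values and accept sane ones. A minimal sketch of that shape; the attribute names and sample values here are placeholders, not the suite's real vectors:

import ldap
import pytest
from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)

@pytest.mark.parametrize('attr,invalid,valid', [
    ('nsslapd-errorlog-logexpirationtime', '-2', '1'),   # placeholder vectors
    ('nsslapd-errorlog-maxlogsize', '0', '100'),
])
def test_logging_digit_config(topology_st, attr, invalid, valid):
    inst = topology_st.standalone
    with pytest.raises(ldap.LDAPError):
        inst.config.set(attr, invalid)   # server must refuse the bad value
    inst.config.set(attr, valid)         # and accept the good one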
Passed | suites/mapping_tree/acceptance_test.py::test_invalid_mt | 10.44 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/mapping_tree/be_del_and_default_naming_attr_test.py::test_be_delete | 10.24 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_inverted | 15.47 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_nonexist_parent | 4.58 | |
No log output captured. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_same_length | 5.16 | |
No log output captured. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_flipped_components | 7.08 | |
No log output captured. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_weird_nesting | 5.12 | |
No log output captured. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_mixed_length | 5.40 | |
No log output captured. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_many_shallow | 24.03 | |
No log output captured. | |||
Passed | suites/mapping_tree/mt_cursed_test.py::test_mapping_tree_many_deep_nesting | 70.86 | |
No log output captured. | |||
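The mt_cursed battery builds deliberately awkward mapping-tree shapes (inverted parents, equal-length siblings, deep nesting) and checks the server still routes operations correctly. Creating one mapping-tree entry by hand with lib389 looks roughly like this (properties as used by the MappingTrees collection; suffix and backend names are illustrative):

from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389.mappingTree import MappingTrees

def test_manual_mapping_tree(topology_st):
    inst = topology_st.standalone
    mts = MappingTrees(inst)
    # One mapping-tree entry: suffix -> backend routing, state 'backend'.
    mts.create(properties={
        'cn': 'dc=child,dc=example,dc=com',
        'nsslapd-state': 'backend',
        'nsslapd-backend': 'childRoot',
    })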
Passed | suites/mapping_tree/referral_during_tot_init_test.py::test_referral_during_tot | 53.61 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3994013f-9f35-42bd-8b28-22292bb5bf2c / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c0daa648-56da-42be-b0cf-7e57e8b6f8a0 / got description=3994013f-9f35-42bd-8b28-22292bb5bf2c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stderr call------------------------------ [04/Jun/2021:23:22:47.586178719 -0400] - INFO - slapd_exemode_ldif2db - Backend Instance: userRoot | |||
Passed | suites/memberof_plugin/regression_test.py::test_memberof_with_repl | 115.68 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for hub1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:524 Creating replication topology. INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 2df9a3e9-f492-48d5-8f4c-77a01f18092d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is working INFO lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 03217ff7-e30f-4ece-8b50-5b2489e3ebdc / got description=2df9a3e9-f492-48d5-8f4c-77a01f18092d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working -------------------------------Captured log call-------------------------------- INFO tests.suites.memberof_plugin.regression_test:regression_test.py:77 update cn=101,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO tests.suites.memberof_plugin.regression_test:regression_test.py:77 update cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add 
nsDS5ReplicatedAttributeListTotal | |||
Passed | suites/memberof_plugin/regression_test.py::test_scheme_violation_errors_logged | 32.36 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2882813d-02cc-4d90-b6db-23860b64064e / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 29677b86-dcab-4c01-9fb6-92bd28389c75 / got description=2882813d-02cc-4d90-b6db-23860b64064e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.memberof_plugin.regression_test:regression_test.py:319 memberOf attr value - cn=group1,ou=groups,dc=example,dc=com INFO tests.suites.memberof_plugin.regression_test:regression_test.py:322 pattern = .*oc_check_allowed_sv.*uid=user_,ou=people,dc=example,dc=com.*memberOf.*not allowed.* | |||
Passed | suites/memberof_plugin/regression_test.py::test_memberof_with_changelog_reset | 49.67 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.memberof_plugin.regression_test:regression_test.py:354 Configure memberof on M1 and M2 INFO tests.suites.memberof_plugin.regression_test:regression_test.py:365 On M1, add 999 test entries allowing memberof INFO tests.suites.memberof_plugin.regression_test:regression_test.py:51 Adding 999 users INFO tests.suites.memberof_plugin.regression_test:regression_test.py:368 On M1, add a group with these 999 entries as members INFO tests.suites.memberof_plugin.regression_test:regression_test.py:376 Adding the test group using async function INFO tests.suites.memberof_plugin.regression_test:regression_test.py:386 Check the log messages for error INFO tests.suites.memberof_plugin.regression_test:regression_test.py:390 Check that the replication is working fine both ways, M1 <-> M2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 32093222-ca86-4404-b2f7-d75c58e6e889 / got description=29677b86-dcab-4c01-9fb6-92bd28389c75) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d993ca91-4bd0-43d0-a659-552832ea2f5c / got description=32093222-ca86-4404-b2f7-d75c58e6e889) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d993ca91-4bd0-43d0-a659-552832ea2f5c / got description=32093222-ca86-4404-b2f7-d75c58e6e889) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d993ca91-4bd0-43d0-a659-552832ea2f5c / got description=32093222-ca86-4404-b2f7-d75c58e6e889) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/memberof_plugin/regression_test.py::test_memberof_group | 13.36 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.memberof_plugin.regression_test:regression_test.py:481 Enable memberof plugin and set the scope as cn=sub1,dc=example,dc=com INFO lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:427 Renaming user (cn=g2,cn=sub2,dc=example,dc=com): new cn=g2-new INFO lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m1,cn=sub1,dc=example,dc=com: memberof->b'cn=g2-new,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g1,cn=sub1,dc=example,dc=com' INFO lib389:regression_test.py:440 !!!!!!! uid=test_m2,cn=sub1,dc=example,dc=com: memberof->b'cn=g2-new,cn=sub1,dc=example,dc=com' | |||
Passed | suites/memberof_plugin/regression_test.py::test_entrycache_on_modrdn_failure | 8.84 | |
-------------------------------Captured log call-------------------------------- CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user0,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user1,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user2,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user3,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user4,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user5,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user6,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user7,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user8,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:575 Adding user (cn=user9,ou=people,dc=example,dc=com): INFO lib389:regression_test.py:596 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:596 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:618 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO lib389:regression_test.py:618 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO lib389:regression_test.py:633 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:633 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:633 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:633 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:672 retrieve: cn=group_admin,ou=permissions,dc=example,dc=com with desc=None INFO lib389:regression_test.py:672 retrieve: cn=group_modify,ou=permissions,dc=example,dc=com with desc=None INFO lib389:regression_test.py:672 retrieve: cn=group_in0,ou=people,dc=example,dc=com with desc=b'mygroup' INFO lib389:regression_test.py:672 retrieve: cn=group_in1,ou=people,dc=example,dc=com with desc=b'mygroup' INFO lib389:regression_test.py:672 retrieve: cn=group_out2,dc=example,dc=com with desc=b'this is to check that the entry having this description has the appropriate DN' | |||
Passed | suites/memberof_plugin/regression_test.py::test_silent_memberof_failure | 19.53 | |
-------------------------------Captured log call-------------------------------- CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user0,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user1,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user2,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user3,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user4,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user5,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user6,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user7,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user8,ou=people,dc=example,dc=com): CRITICAL tests.suites.memberof_plugin.regression_test:regression_test.py:738 Adding user (cn=user9,ou=people,dc=example,dc=com): INFO lib389:regression_test.py:759 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:759 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in0,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:781 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO lib389:regression_test.py:781 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_out1,dc=example,dc=com') INFO lib389:regression_test.py:796 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:796 !!!!!!! cn=user0,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:796 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in0,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:796 !!!!!!! cn=user1,ou=people,dc=example,dc=com: memberof->b'cn=group_in1,ou=people,dc=example,dc=com' (vs b'cn=group_in1,ou=people,dc=example,dc=com') INFO lib389:regression_test.py:833 Should assert cn=user2,ou=people,dc=example,dc=com has memberof is False INFO lib389:regression_test.py:833 Should assert cn=user3,ou=people,dc=example,dc=com has memberof is False INFO lib389:regression_test.py:856 Should assert cn=user4,ou=people,dc=example,dc=com has memberof is False INFO lib389:regression_test.py:856 Should assert cn=user5,ou=people,dc=example,dc=com has memberof is False | |||
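All of the memberof rows above share one mechanic: the plugin fixes up each member's memberOf attribute after group writes, and the tests read it back (the "!!!!!!! uid=...: memberof->..." lines). The enable-and-verify core, sketched (names illustrative; real tests also handle fixup timing and schema, as the oc_check_allowed_sv errors above show):

from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389.plugins import MemberOfPlugin
from lib389.idm.user import UserAccounts
from lib389.idm.group import Groups
from lib389._constants import DEFAULT_SUFFIX

def test_memberof_basic(topology_st):
    inst = topology_st.standalone
    MemberOfPlugin(inst).enable()
    inst.restart()                      # plugin enablement needs a restart
    user = UserAccounts(inst, DEFAULT_SUFFIX).create_test_user(uid=1000)
    group = Groups(inst, DEFAULT_SUFFIX).create(properties={'cn': 'g1'})
    group.add_member(user.dn)
    # Assumes the user's objectclasses allow memberOf (or memberOfAutoAddOC is set).
    assert group.dn in user.get_attr_vals_utf8('memberOf')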
Passed | suites/monitor/monitor_test.py::test_monitor | 8.38 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.monitor.monitor_test:monitor_test.py:47 connection: ['1:20210605032719Z:3:2:-:cn=directory manager:0:0:0:1:ip=127.0.0.1'], currentconnections: ['1'], totalconnections: ['1'] INFO tests.suites.monitor.monitor_test:monitor_test.py:51 version :: ['1:20210605032719Z:6:5:-:cn=directory manager:0:0:0:1:ip=127.0.0.1'] INFO tests.suites.monitor.monitor_test:monitor_test.py:55 threads: ['16'],currentconnectionsatmaxthreads: ['0'],maxthreadsperconnhits: ['0'] INFO tests.suites.monitor.monitor_test:monitor_test.py:59 nbackends: ['1'], backendmonitordn: ['cn=monitor,cn=userRoot,cn=ldbm database,cn=plugins,cn=config'] INFO tests.suites.monitor.monitor_test:monitor_test.py:63 opsinitiated: ['12'], opscompleted: ['13'] INFO tests.suites.monitor.monitor_test:monitor_test.py:67 dtablesize: ['1024'],readwaiters: ['0'],entriessent: ['14'],bytessent: ['1097'],currenttime: ['20210605032719Z'],starttime: ['20210605032719Z'] | |||
Passed | suites/monitor/monitor_test.py::test_monitor_ldbm | 0.07 | |
No log output captured. | |||
Passed | suites/monitor/monitor_test.py::test_monitor_backend | 0.07 | |
No log output captured. | |||
Passed | suites/monitor/monitor_test.py::test_num_subordinates_with_monitor_suffix | 1.96 | |
No log output captured. | |||
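test_monitor reads counters straight off cn=monitor (connections, opsinitiated, bytessent, ...). With lib389's generic object layer that is just attribute reads on the Monitor object:

from lib389.topologies import topology_st  # noqa: F401  (pytest fixture)
from lib389.monitor import Monitor

def test_read_counters(topology_st):
    monitor = Monitor(topology_st.standalone)   # wraps cn=monitor
    conns = monitor.get_attr_vals_utf8('currentconnections')
    ops = monitor.get_attr_vals_utf8('opsinitiated')
    # Values arrive as lists of strings, exactly as logged above.
    assert int(conns[0]) >= 1 and int(ops[0]) >= 0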
Passed | suites/openldap_2_389/migrate_hdb_test.py::test_migrate_openldap_hdb | 17.41 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ ==== migration plan ==== SchemaAttributeCreate -> ('pseudonym',) SchemaAttributeCreate -> ('email', 'emailAddress', 'pkcs9email') SchemaAttributeCreate -> ('textEncodedORAddress',) SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeCreate -> ('aRecord',) SchemaAttributeCreate -> ('mDRecord',) SchemaAttributeCreate -> ('mXRecord',) SchemaAttributeCreate -> ('nSRecord',) SchemaAttributeCreate -> ('sOARecord',) SchemaAttributeCreate -> ('cNAMERecord',) SchemaAttributeCreate -> ('janetMailbox',) SchemaAttributeCreate -> ('mailPreferenceOption',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('personalSignature',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassCreate -> 0.9.2342.19200300.100.4.15 ('dNSDomain',) may -> ('ARecord', 'MDRecord', 'MXRecord', 'NSRecord', 'SOARecord', 'CNAMERecord') must -> () sup -> ('domain',) SchemaClassCreate -> 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup -> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 
'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must -> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseCreate -> dc=ldapdom,dc=net, 1212d82e-e5e6-103a-9746-d731be523aab DatabaseIndexCreate -> objectClass eq, dc=ldapdom,dc=net DatabaseReindex -> dc=ldapdom,dc=net ==== end migration plan ==== -------------------------------Captured log call-------------------------------- INFO lib389.migrate.openldap.config:config.py:280 Examining OpenLDAP Configuration ... INFO lib389.migrate.openldap.config:config.py:307 Completed OpenLDAP Configuration Parsing. INFO lib389.migrate.plan:plan.py:655 migration: 1 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 2 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 3 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 4 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 5 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 6 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 7 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 8 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 9 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 10 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 11 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 12 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 13 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 14 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 15 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 16 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 17 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 18 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 19 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 20 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 21 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 22 / 54 complete ... 
INFO lib389.migrate.plan:plan.py:655 migration: 23 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 24 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 25 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 26 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 27 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 28 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 29 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 30 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 31 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 32 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 33 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 34 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 35 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 36 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 37 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 38 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 39 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 40 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 41 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 42 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 43 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 44 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 45 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 46 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 47 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 48 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 49 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 50 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 51 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 52 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 53 / 54 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 54 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 1 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 2 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 3 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 4 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 5 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 6 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 7 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 8 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 9 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 10 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 11 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 12 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 13 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 14 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 15 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 16 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 17 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 18 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 19 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 20 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 21 / 54 complete ... 
INFO lib389.migrate.plan:plan.py:665 post: 22 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 23 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 24 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 25 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 26 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 27 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 28 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 29 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 30 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 31 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 32 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 33 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 34 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 35 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 36 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 37 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 38 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 39 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 40 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 41 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 42 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 43 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 44 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 45 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 46 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 47 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 48 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 49 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 50 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 51 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 52 / 54 complete ... INFO lib389.migrate.plan:plan.py:665 post: 53 / 54 complete ... INFO lib389:tasks.py:837 Index task index_all_06042021_232736 completed successfully INFO lib389.migrate.plan:plan.py:665 post: 54 / 54 complete ... | |||
Passed | suites/openldap_2_389/migrate_test.py::test_parse_openldap_slapdd | 0.21 | |
-------------------------------Captured log call-------------------------------- INFO lib389.migrate.openldap.config:config.py:280 Examining OpenLDAP Configuration ... INFO lib389.migrate.openldap.config:config.py:307 Completed OpenLDAP Configuration Parsing. | |||
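Editor's note: the parse-only test above exercises just the configuration reader that produced these two INFO lines. A minimal sketch of that step, assuming an olConfig class implied by the lib389.migrate.openldap.config module path in the log; the class name, its argument, and the schema/databases attributes are assumptions, not verified signatures:

    # Hypothetical sketch: parse an OpenLDAP cn=config tree with lib389's
    # migration reader. The module path comes from the captured log above;
    # olConfig and its attributes are assumed names, not a checked API.
    from lib389.migrate.openldap.config import olConfig

    config = olConfig('/etc/openldap/slapd.d')   # path is illustrative
    print(config.schema)      # assumed: parsed attribute types / object classes
    print(config.databases)   # assumed: parsed backends (suffix, indexes, overlays)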
Passed | suites/openldap_2_389/migrate_test.py::test_migrate_openldap_slapdd | 43.74 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ ==== migration plan ==== SchemaAttributeCreate -> ('pseudonym',) SchemaAttributeCreate -> ('email', 'emailAddress', 'pkcs9email') SchemaAttributeCreate -> ('textEncodedORAddress',) SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeCreate -> ('aRecord',) SchemaAttributeCreate -> ('mDRecord',) SchemaAttributeCreate -> ('mXRecord',) SchemaAttributeCreate -> ('nSRecord',) SchemaAttributeCreate -> ('sOARecord',) SchemaAttributeCreate -> ('cNAMERecord',) SchemaAttributeCreate -> ('janetMailbox',) SchemaAttributeCreate -> ('mailPreferenceOption',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('personalSignature',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassCreate -> 0.9.2342.19200300.100.4.15 ('dNSDomain',) may -> ('ARecord', 'MDRecord', 'MXRecord', 'NSRecord', 'SOARecord', 'CNAMERecord') must -> () sup -> ('domain',) SchemaClassCreate -> 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup -> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 
'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must -> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseReindex -> dc=example,dc=com PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=com PluginMemberOfFixup -> dc=example,dc=com PluginRefintEnable PluginRefintAttributes -> member PluginRefintAttributes -> memberOf PluginRefintScope -> dc=example,dc=com PluginUniqueConfigure -> dc=example,dc=com, mail 401a528e-eaf5-1039-8667-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=com, uid 401a528e-eaf5-1039-8667-dbfbf2f5e6dd DatabaseCreate -> dc=example,dc=net, 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseIndexCreate -> objectClass eq, dc=example,dc=net DatabaseReindex -> dc=example,dc=net PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=net PluginMemberOfFixup -> dc=example,dc=net PluginUniqueConfigure -> dc=example,dc=net, mail 401a7084-eaf5-1039-866c-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=net, uid 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseLdifImport -> dc=example,dc=com /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_com.slapcat.ldif DatabaseLdifImport -> dc=example,dc=net /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_net.slapcat.ldif ==== end migration plan ==== -------------------------------Captured log call-------------------------------- INFO lib389.migrate.openldap.config:config.py:280 Examining OpenLDAP Configuration ... INFO lib389.migrate.openldap.config:config.py:307 Completed OpenLDAP Configuration Parsing. INFO lib389.migrate.plan:plan.py:655 migration: 1 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 2 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 3 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 4 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 5 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 6 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 7 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 8 / 71 complete ... 
INFO lib389.migrate.plan:plan.py:655 migration: 9 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 10 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 11 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 12 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 13 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 14 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 15 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 16 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 17 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 18 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 19 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 20 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 21 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 22 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 23 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 24 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 25 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 26 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 27 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 28 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 29 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 30 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 31 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 32 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 33 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 34 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 35 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 36 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 37 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 38 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 39 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 40 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 41 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 42 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 43 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 44 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 45 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 46 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 47 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 48 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 49 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 50 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 51 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 52 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 53 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 54 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 55 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 56 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 57 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 58 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 59 / 71 complete ... 
INFO lib389.migrate.plan:plan.py:655 migration: 60 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 61 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 62 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 63 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 64 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 65 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 66 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 67 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 68 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 69 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 70 / 71 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 71 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 1 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 2 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 3 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 4 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 5 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 6 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 7 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 8 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 9 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 10 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 11 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 12 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 13 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 14 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 15 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 16 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 17 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 18 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 19 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 20 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 21 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 22 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 23 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 24 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 25 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 26 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 27 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 28 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 29 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 30 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 31 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 32 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 33 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 34 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 35 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 36 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 37 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 38 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 39 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 40 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 41 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 42 / 71 complete ... 
INFO lib389.migrate.plan:plan.py:665 post: 43 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 44 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 45 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 46 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 47 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 48 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 49 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 50 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 51 / 71 complete ... INFO lib389:tasks.py:837 Index task index_all_06042021_232815 completed successfully INFO lib389.migrate.plan:plan.py:665 post: 52 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 53 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 54 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 55 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 56 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 57 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 58 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 59 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 60 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 61 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 62 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 63 / 71 complete ... INFO lib389:tasks.py:837 Index task index_all_06042021_232819 completed successfully INFO lib389.migrate.plan:plan.py:665 post: 64 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 65 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 66 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 67 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 68 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 69 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 70 / 71 complete ... INFO lib389.migrate.plan:plan.py:665 post: 71 / 71 complete ... | |||
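Editor's note: the full migration test builds a plan from the parsed configuration plus a slapcat LDIF per suffix, prints it (the "==== migration plan ====" block), executes each step (the "migration: N / 71" lines), then runs post-steps such as the two index tasks logged above. A sketch under the same assumptions: DirSrv, local_simple_allocate and open are real lib389 calls, while the Migration constructor shown here is inferred from the plan output, not a verified signature:

    # Hypothetical sketch of building and executing a migration plan.
    from lib389 import DirSrv
    from lib389.migrate.openldap.config import olConfig
    from lib389.migrate.plan import Migration

    inst = DirSrv()
    inst.local_simple_allocate(serverid='standalone1')  # existing instance
    inst.open()

    config = olConfig('/etc/openldap/slapd.d')          # path is illustrative
    ldifs = {  # suffix -> slapcat output to import (paths illustrative)
        'dc=example,dc=com': '/root/example_com.slapcat.ldif',
        'dc=example,dc=net': '/root/example_net.slapcat.ldif',
    }
    migration = Migration(inst, config.schema, config.databases, ldifs)  # assumed
    migration.execute_plan()  # would emit the "migration: N / 71" lines above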
Passed | suites/openldap_2_389/migrate_test.py::test_migrate_openldap_slapdd_skip_elements | 21.76 | |
------------------------------Captured stdout call------------------------------ ==== migration plan ==== SchemaAttributeUnsupported -> ('otherMailbox',) SchemaAttributeUnsupported -> ('dSAQuality',) SchemaAttributeUnsupported -> ('singleLevelQuality',) SchemaAttributeUnsupported -> ('subtreeMinimumQuality',) SchemaAttributeUnsupported -> ('subtreeMaximumQuality',) SchemaAttributeCreate -> ('suseDefaultBase',) SchemaAttributeCreate -> ('suseNextUniqueId',) SchemaAttributeCreate -> ('suseMinUniqueId',) SchemaAttributeCreate -> ('suseMaxUniqueId',) SchemaAttributeCreate -> ('suseDefaultTemplate',) SchemaAttributeCreate -> ('suseSearchFilter',) SchemaAttributeCreate -> ('suseDefaultValue',) SchemaAttributeCreate -> ('suseNamingAttribute',) SchemaAttributeCreate -> ('suseSecondaryGroup',) SchemaAttributeCreate -> ('suseMinPasswordLength',) SchemaAttributeCreate -> ('suseMaxPasswordLength',) SchemaAttributeCreate -> ('susePasswordHash',) SchemaAttributeCreate -> ('suseSkelDir',) SchemaAttributeCreate -> ('susePlugin',) SchemaAttributeCreate -> ('suseMapAttribute',) SchemaAttributeCreate -> ('suseImapServer',) SchemaAttributeCreate -> ('suseImapAdmin',) SchemaAttributeCreate -> ('suseImapDefaultQuota',) SchemaAttributeCreate -> ('suseImapUseSsl',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.4 ('pilotPerson', 'newPilotPerson') may -> ('userid', 'textEncodedORAddress', 'rfc822Mailbox', 'favouriteDrink', 'roomNumber', 'userClass', 'homeTelephoneNumber', 'homePostalAddress', 'secretary', 'personalTitle', 'preferredDeliveryMethod', 'businessCategory', 'janetMailbox', 'otherMailbox', 'mobileTelephoneNumber', 'pagerTelephoneNumber', 'organizationalStatus', 'mailPreferenceOption', 'personalSignature') must -> () sup -> ('person',) SchemaClassInconsistent -> ( 0.9.2342.19200300.100.4.20 NAME 'pilotOrganization' SUP organization STRUCTURAL MAY buildingName X-ORIGIN 'user defined' ) to 0.9.2342.19200300.100.4.20 ('pilotOrganization',) may -> ('buildingName',) must -> () sup -> ('organization', 'organizationalUnit') SchemaClassUnsupported -> 0.9.2342.19200300.100.4.21 ('pilotDSA',) may -> ('dSAQuality',) must -> () sup -> ('dsa',) SchemaClassUnsupported -> 0.9.2342.19200300.100.4.22 ('qualityLabelledData',) may -> ('subtreeMinimumQuality', 'subtreeMaximumQuality') must -> ('dsaQuality',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:2 ('suseModuleConfiguration',) may -> ('suseDefaultBase',) must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:3 ('suseUserConfiguration',) may -> ('suseMinPasswordLength', 'suseMaxPasswordLength', 'susePasswordHash', 'suseSkelDir', 'suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:4 ('suseObjectTemplate',) may -> ('susePlugin', 'suseDefaultValue', 'suseNamingAttribute') must -> ('cn',) sup -> ('top',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:5 ('suseUserTemplate',) may -> ('suseSecondaryGroup',) must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:6 ('suseGroupTemplate',) may -> () must -> ('cn',) sup -> ('suseObjectTemplate',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:7 ('suseGroupConfiguration',) may -> ('suseNextUniqueId', 'suseMinUniqueId', 'suseMaxUniqueId', 'suseDefaultTemplate', 'suseSearchFilter', 'suseMapAttribute') must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> 
SUSE.YaST.ModuleConfig.OC:8 ('suseCaConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:9 ('suseDnsConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:10 ('suseDhcpConfiguration',) may -> () must -> () sup -> ('suseModuleConfiguration',) SchemaClassCreate -> SUSE.YaST.ModuleConfig.OC:11 ('suseMailConfiguration',) may -> () must -> ('suseImapServer', 'suseImapAdmin', 'suseImapDefaultQuota', 'suseImapUseSsl') sup -> ('suseModuleConfiguration',) DatabaseReindex -> dc=example,dc=com PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=com PluginMemberOfFixup -> dc=example,dc=com PluginRefintEnable PluginRefintAttributes -> member PluginRefintAttributes -> memberOf PluginRefintScope -> dc=example,dc=com PluginUniqueConfigure -> dc=example,dc=com, mail 401a528e-eaf5-1039-8667-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=com, uid 401a528e-eaf5-1039-8667-dbfbf2f5e6dd DatabaseReindex -> dc=example,dc=net PluginMemberOfEnable PluginMemberOfScope -> dc=example,dc=net PluginMemberOfFixup -> dc=example,dc=net PluginUniqueConfigure -> dc=example,dc=net, mail 401a7084-eaf5-1039-866c-dbfbf2f5e6dd PluginUniqueConfigure -> dc=example,dc=net, uid 401a7084-eaf5-1039-866c-dbfbf2f5e6dd DatabaseLdifImport -> dc=example,dc=com /export/tests/suites/openldap_2_389/../../data/openldap_2_389/1/example_com.slapcat.ldif ==== end migration plan ==== -------------------------------Captured log call-------------------------------- INFO lib389.migrate.openldap.config:config.py:280 Examining OpenLDAP Configuration ... INFO lib389.migrate.openldap.config:config.py:307 Completed OpenLDAP Configuration Parsing. INFO lib389.migrate.plan:plan.py:655 migration: 1 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 2 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 3 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 4 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 5 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 6 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 7 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 8 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 9 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 10 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 11 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 12 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 13 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 14 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 15 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 16 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 17 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 18 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 19 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 20 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 21 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 22 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 23 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 24 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 25 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 26 / 55 complete ... 
INFO lib389.migrate.plan:plan.py:655 migration: 27 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 28 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 29 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 30 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 31 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 32 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 33 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 34 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 35 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 36 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 37 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 38 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 39 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 40 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 41 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 42 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 43 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 44 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 45 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 46 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 47 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 48 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 49 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 50 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 51 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 52 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 53 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 54 / 55 complete ... INFO lib389.migrate.plan:plan.py:655 migration: 55 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 1 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 2 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 3 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 4 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 5 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 6 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 7 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 8 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 9 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 10 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 11 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 12 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 13 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 14 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 15 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 16 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 17 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 18 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 19 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 20 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 21 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 22 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 23 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 24 / 55 complete ... 
INFO lib389.migrate.plan:plan.py:665 post: 25 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 26 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 27 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 28 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 29 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 30 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 31 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 32 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 33 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 34 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 35 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 36 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 37 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 38 / 55 complete ... INFO lib389:tasks.py:837 Index task index_all_06042021_232837 completed successfully INFO lib389.migrate.plan:plan.py:665 post: 39 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 40 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 41 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 42 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 43 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 44 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 45 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 46 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 47 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 48 / 55 complete ... INFO lib389:tasks.py:837 Index task index_all_06042021_232839 completed successfully INFO lib389.migrate.plan:plan.py:665 post: 49 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 50 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 51 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 52 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 53 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 54 / 55 complete ... INFO lib389.migrate.plan:plan.py:665 post: 55 / 55 complete ... | |||
Passed | suites/openldap_2_389/password_migrate_test.py::test_migrate_openldap_password_hash | 10.31 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
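Editor's note: this test verifies that userPassword hashes produced by OpenLDAP (for example {SSHA} values) keep working after import, since 389-ds supports the same storage schemes. A minimal verification sketch; host, port, bind DN and password are illustrative:

    import ldap

    # After migration, binding with the original clear-text password must
    # still succeed against the imported {SSHA} userPassword hash.
    conn = ldap.initialize('ldap://localhost:38901')
    conn.simple_bind_s('uid=migrated_user,ou=People,dc=example,dc=com', 'password')
    conn.unbind_s()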
Passed | suites/paged_results/paged_results_test.py::test_search_success[6-5] | 8.61 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:56 Adding user simplepaged_test -------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 5 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:248 Set user bind simplepaged_test INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 6; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c45fc70>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:254 5 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 5 users | |||
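Editor's note: the paged-results suite drives RFC 2696 simple paged results through python-ldap, as the SimplePagedResultsControl objects in the log show. A self-contained sketch of the cookie loop these tests run (host, port, bind DN and password are illustrative):

    import ldap
    from ldap.controls import SimplePagedResultsControl

    conn = ldap.initialize('ldap://localhost:38901')
    conn.simple_bind_s('uid=simplepaged_test,ou=People,dc=example,dc=com', 'password')

    req_ctrl = SimplePagedResultsControl(True, size=5, cookie='')
    entries = []
    while True:
        msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                                '(uid=test*)', ['sn'], serverctrls=[req_ctrl])
        rtype, rdata, rmsgid, rctrls = conn.result3(msgid)
        entries.extend(rdata)
        # Continue only if the paged-results response control carries a
        # non-empty cookie; an empty cookie marks the last page.
        pctrls = [c for c in rctrls
                  if c.controlType == SimplePagedResultsControl.controlType]
        if pctrls and pctrls[0].cookie:
            req_ctrl.cookie = pctrls[0].cookie
        else:
            break
    conn.unbind_s()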
Passed | suites/paged_results/paged_results_test.py::test_search_success[5-5] | 0.28 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 5 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:248 Set user bind simplepaged_test INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99cc71fd0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:254 5 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 5 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_success[5-25] | 1.04 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 25 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:248 Set user bind simplepaged_test INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c855f40>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:254 25 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 25 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[50-200-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-idlistscanlimit-100-UNWILLING_TO_PERFORM] | 7.78 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 200 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 100. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:301 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:304 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:310 Initiate ldapsearch with created control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:322 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 200 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
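Editor's note: each parametrized limits case follows the same pattern visible in the log: save the current value of a server attribute, tighten it, run the paged search expecting a specific LDAP error (here UNWILLING_TO_PERFORM), then restore the saved value. A sketch of that pattern for the first case, reusing the bound conn from the paged-search sketch above:

    import ldap

    # Tighten the ID-list scan limit, then restore it afterwards.
    dn = 'cn=config,cn=ldbm database,cn=plugins,cn=config'
    attr = 'nsslapd-idlistscanlimit'
    old = conn.search_s(dn, ldap.SCOPE_BASE, attrlist=[attr])[0][1][attr][0]
    conn.modify_s(dn, [(ldap.MOD_REPLACE, attr, b'100')])
    try:
        pass  # run the paged search here; ldap.UNWILLING_TO_PERFORM is expected
    finally:
        conn.modify_s(dn, [(ldap.MOD_REPLACE, attr, old)])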
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[5-15-cn=config-nsslapd-timelimit-20-UNAVAILABLE_CRITICAL_EXTENSION] | 30.70 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 15 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-timelimit to 20. Previous value - b'3600'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:301 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:304 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:310 Initiate ldapsearch with created control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:322 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 15 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-timelimit to b'3600'. Previous value - b'20'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-sizelimit-20-SIZELIMIT_EXCEEDED] | 1.97 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 20. Previous value - b'2000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:301 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:304 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:310 Initiate ldapsearch with created control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:322 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'20'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-pagedsizelimit-5-SIZELIMIT_EXCEEDED] | 1.95 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5. Previous value - b'0'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:301 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:304 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:310 Initiate ldapsearch with created control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:322 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_limits_fail[5-50-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-lookthroughlimit-20-ADMINLIMIT_EXCEEDED] | 1.92 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 20. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:301 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:304 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:310 Initiate ldapsearch with created control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:322 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'20'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_sort_success | 1.97 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 50 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:381 Initiate ldapsearch with created control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:382 Collect data with sorting INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99ccb7e20>, <lib389._controls.SSSRequestControl object at 0x7ff99ccb7880>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:387 Substring numbers from user DNs INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:391 Assert that list is sorted INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 50 users | |||
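Editor's note: the sort test attaches a server-side sorting control alongside the paged control, as the two control objects in the log line show, then checks the collected user numbers come back ordered. The test uses lib389's own SSSRequestControl; a sketch with python-ldap's equivalent (the ordering attribute 'cn' is illustrative), again reusing conn from above:

    import ldap
    from ldap.controls import SimplePagedResultsControl
    from ldap.controls.sss import SSSRequestControl

    # Page through results while asking the server to sort them.
    paged = SimplePagedResultsControl(True, size=5, cookie='')
    sort = SSSRequestControl(criticality=True, ordering_rules=['cn'])
    msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(uid=test*)', ['sn'], serverctrls=[paged, sort])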
Passed | suites/paged_results/paged_results_test.py::test_search_abandon | 5.77 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:423 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:426 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:430 Initiate a search with a paged results control INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:433 Abandon the search INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:436 Expect an ldap.TIMEOUT exception, while trying to get the search results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users | |||
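Editor's note: the abandon test starts a paged search, abandons the message ID, then polls for a result with a timeout; since the abandoned operation never completes, python-ldap raises ldap.TIMEOUT, which is exactly what the log says the test expects. A sketch, with conn and req_ctrl as in the paged-search sketch above:

    import ldap

    msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(uid=test*)', ['sn'], serverctrls=[req_ctrl])
    conn.abandon(msgid)
    try:
        conn.result3(msgid, timeout=5)  # seconds
    except ldap.TIMEOUT:
        print('abandoned search produced no result, as expected')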
Passed | suites/paged_results/paged_results_test.py::test_search_with_timelimit | 34.40 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:473 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:476 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:481 Iteration 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:488 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:488 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:504 Done with this search - sleeping 10 seconds INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:481 Iteration 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:488 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:488 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:504 Done with this search - sleeping 10 seconds INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:481 Iteration 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:488 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:488 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:504 Done with this search - sleeping 10 seconds INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[ip] | 4.04 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:554 Back up current suffix ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:557 Add test ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:563 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:566 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:570 Initiate three searches with a paged results control INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:572 1 search INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c4d7850>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:575 100 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:572 2 search INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c4d7850>]. 
INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:575 100 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:572 3 search INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 5; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c4d7850>]. 
INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 11 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 12 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 13 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 14 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 15 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 16 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 17 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 18 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 19 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:575 100 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:577 If we are here, then no error has happened. We are good. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:580 Restore ACI INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
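Editor's note: this case backs up the suffix ACI, installs one that grants access via a client-address bind rule (the [ip] parameter here; a companion case uses dns), and confirms three consecutive 100-entry paged searches still work. An illustrative ACI of that shape; the exact target, permissions and address used by the real test are not shown in this report:

    import ldap

    # Illustrative only: add an ip-based ACI to the suffix entry.
    aci = (b'(targetattr="*")(version 3.0; acl "paged-ip"; '
           b'allow (all) (userdn="ldap:///all") and (ip="127.0.0.1");)')
    conn.modify_s('dc=example,dc=com', [(ldap.MOD_ADD, 'aci', aci)])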
Passed | suites/paged_results/paged_results_test.py::test_search_multiple_paging | 4.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:614 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:617 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:622 Iteration 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:622 Iteration 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:622 Iteration 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_invalid_cookie[1000] | 3.79 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:670 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:673 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:681 Put an invalid cookie (1000) to the control. TypeError is expected INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_invalid_cookie[-1] | 3.84 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 100 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:670 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:673 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:681 Put an invalid cookie (-1) to the control. TypeError is expected INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 100 users | |||
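Editor's note: both invalid-cookie cases poison the paged control with a value (1000 or -1) that is not a byte string and expect a TypeError from the client library. A sketch of the failure mode; where exactly python-ldap raises may differ by version, and conn is the bound connection from the paged-search sketch above:

    import ldap
    from ldap.controls import SimplePagedResultsControl

    req_ctrl = SimplePagedResultsControl(True, size=5, cookie='')
    req_ctrl.cookie = 1000  # invalid: a paged-results cookie must be bytes
    try:
        conn.search_ext_s('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                          '(uid=test*)', serverctrls=[req_ctrl])
    except TypeError:
        print('TypeError raised, as the test expects')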
Passed | suites/paged_results/paged_results_test.py::test_search_abandon_with_zero_size | 0.50 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:715 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:718 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users | |||
Passed | suites/paged_results/paged_results_test.py::test_search_pagedsizelimit_success | 0.45 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 20. Previous value - b'0'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:765 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99cedbf70>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:773 10 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'20'. Modified suffix - cn=config. | |||
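nsslapd-pagedsizelimit is the global cap on how many entries a single paged search may return. The "Set ... Modified suffix - cn=config" lines boil down to one modify and one restore; a sketch with plain python-ldap, assuming a Directory Manager bind on conn:

    import ldap
    conn.modify_s('cn=config',
                  [(ldap.MOD_REPLACE, 'nsslapd-pagedsizelimit', b'20')])
    # ... run the 10-entry paged search, which fits under the limit ...
    conn.modify_s('cn=config',
                  [(ldap.MOD_REPLACE, 'nsslapd-pagedsizelimit', b'0')])  # restore the default of 0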
Passed | suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[5-15-PASS] | 0.47 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5. Previous value - b'0'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to 15. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:829 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:840 Expect to pass INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c855610>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:842 10 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to None. Previous value - b'15'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[15-5-SIZELIMIT_EXCEEDED] | 0.51 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 15. Previous value - b'0'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to 5. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:829 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:836 Expect to fail with SIZELIMIT_EXCEEDED INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99ceee280>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'15'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedSizeLimit to None. Previous value - b'5'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
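The two parametrizations show the per-entry nsPagedSizeLimit overriding the global cn=config value for that bind DN: global 5 with user 15 passes a 10-entry search, global 15 with user 5 fails with SIZELIMIT_EXCEEDED. A sketch of the passing setup, with the DNs taken from the log:

    conn.modify_s('cn=config',
                  [(ldap.MOD_REPLACE, 'nsslapd-pagedsizelimit', b'5')])
    conn.modify_s('uid=simplepaged_test,ou=People,dc=example,dc=com',
                  [(ldap.MOD_ADD, 'nsPagedSizeLimit', b'15')])
    # a paged search returning 10 entries as this user now succeeds, because
    # the user-level limit (15) takes precedence over the global limit (5)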
Passed | suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] | 3.82 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 5000. Previous value - b'2000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5000. Previous value - b'0'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 100. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 100. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:902 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:909 Expect to fail with ADMINLIMIT_EXCEEDED INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99ceda7f0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'5000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'100'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values1-PASS] | 4.06 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to 5000. Previous value - b'2000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to 5000. Previous value - b'0'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 120. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 122. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:902 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:913 Expect to pass INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99ceee280>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:915 101 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-sizelimit to b'2000'. Previous value - b'5000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-pagedsizelimit to b'0'. Previous value - b'5000'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'122'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'120'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. | |||
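test_search_paged_limits plays the backend-wide lookthrough and ID-list scan limits against a 101-entry result set: at 100 each, the search aborts with ADMINLIMIT_EXCEEDED around page 8; at 120/122, all eleven pages come back. Both knobs live on the ldbm plugin entry, per the "Modified suffix" lines; a sketch:

    LDBM_CONFIG = 'cn=config,cn=ldbm database,cn=plugins,cn=config'
    conn.modify_s(LDBM_CONFIG,
                  [(ldap.MOD_REPLACE, 'nsslapd-idlistscanlimit', b'100'),
                   (ldap.MOD_REPLACE, 'nsslapd-lookthroughlimit', b'100')])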
Passed | suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] | 3.72 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 1000. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 1000. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to 100. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to 100. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:977 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:984 Expect to fail with ADMINLIMIT_EXCEEDED INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c4a86d0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to None. Previous value - b'100'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to None. Previous value - b'100'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Passed | suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values1-PASS] | 3.67 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to 1000. Previous value - b'5000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to 1000. Previous value - b'4000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to 120. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to 122. Previous value - None. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:977 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:988 Expect to pass INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 10; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99cb6f6a0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 6 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 7 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 8 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 9 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 10 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:990 101 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 101 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-lookthroughlimit to b'5000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-idlistscanlimit to b'4000'. Previous value - b'1000'. Modified suffix - cn=config,cn=ldbm database,cn=plugins,cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedIDListScanLimit to None. Previous value - b'120'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsPagedLookthroughLimit to None. Previous value - b'122'. Modified suffix - uid=simplepaged_test,ou=People,dc=example,dc=com. | |||
Passed | suites/paged_results/paged_results_test.py::test_ger_basic | 0.77 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c790fd0>, <ldap.controls.simple.GetEffectiveRightsControl object at 0x7ff99c790130>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1029 20 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1031 Check for attributeLevelRights INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1034 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users | |||
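test_ger_basic stacks a Get Effective Rights request control on top of the paged control, so every returned entry also carries attributeLevelRights. A sketch with python-ldap's control classes (module paths as shown in the log; the authzId value is illustrative, not the test's literal):

    import ldap
    from ldap.controls.libldap import SimplePagedResultsControl
    from ldap.controls.simple import GetEffectiveRightsControl

    spr = SimplePagedResultsControl(True, size=4, cookie='')
    ger = GetEffectiveRightsControl(True, authzId=b'dn: cn=Directory Manager')
    msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(uid=test*)', ['dn', 'sn'],
                            serverctrls=[spr, ger])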
Passed | suites/paged_results/paged_results_test.py::test_multi_suffix_search | 7.72 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:87 Adding suffix: o=test_parent and backend: parent_base INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:96 Adding ACI to allow our test user to search INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:108 Adding suffix: ou=child,o=test_parent and backend: child_base -------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1066 Clear the access log INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: o=test_parent; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c45f310>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 5 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1077 20 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1080 Restart the server to flush the logs INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1086 Assert that the last pr_cookie == -1 and all other pr_cookie values == 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1091 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 10 users |
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[None] | 1.25 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1125 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99cc75520>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1132 20 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1135 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[-1] | 1.21 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to -1. Previous value - b'-1'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1125 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c45a2b0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1132 20 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1135 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'-1'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[1000] | 1.37 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 1000. Previous value - b'-1'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1125 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:191 Running simple paged result search with - search suffix: dc=example,dc=com; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl object at 0x7ff99c7a9ca0>]. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 0 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 1 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 2 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 3 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:199 Getting page 4 INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1132 20 results INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1135 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'1000'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[0] | 0.96 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 0. Previous value - b'-1'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1171 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1174 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1189 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'0'. Modified suffix - cn=config. | |||
Passed | suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[1] | 2.37 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:133 Adding 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to 1. Previous value - b'-1'. Modified suffix - cn=config. INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1171 Set user bind INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1174 Create simple paged results control instance INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:1189 Remove added users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:155 Deleting 20 users INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:169 Set nsslapd-maxsimplepaged-per-conn to b'-1'. Previous value - b'1'. Modified suffix - cn=config. -----------------------------Captured log teardown------------------------------ INFO tests.suites.paged_results.paged_results_test:paged_results_test.py:74 Deleting user simplepaged_test | |||
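nsslapd-maxsimplepaged-per-conn bounds how many simple paged searches one connection may have in flight at once: -1 (no limit) and 1000 pass, while 0 and 1 make any, or a second, paged search on the same connection fail. A hedged sketch of the limit-1 case (user_conn and base are assumptions; the exact error the suite asserts on is not shown in this log):

    conn.modify_s('cn=config',
                  [(ldap.MOD_REPLACE, 'nsslapd-maxsimplepaged-per-conn', b'1')])
    ctrl_a = SimplePagedResultsControl(True, size=4, cookie='')
    ctrl_b = SimplePagedResultsControl(True, size=4, cookie='')
    # the first paged search is still outstanding on user_conn ...
    m1 = user_conn.search_ext(base, ldap.SCOPE_SUBTREE, '(uid=test*)',
                              serverctrls=[ctrl_a])
    # ... so a second one on the same connection exceeds the per-connection
    # limit and is rejected when its result is read
    m2 = user_conn.search_ext(base, ldap.SCOPE_SUBTREE, '(uid=test*)',
                              serverctrls=[ctrl_b])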
Passed | suites/password/password_policy_test.py::test_password_change_section | 10.46 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/password_policy_test.py::test_password_syntax_section | 3.23 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_password_history_section | 8.62 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_password_minimum_age_section | 11.25 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_account_lockout_and_lockout_duration_section | 6.39 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_grace_limit_section | 17.82 | |
No log output captured. | |||
Passed | suites/password/password_policy_test.py::test_additional_corner_cases | 2.98 | |
No log output captured. | |||
Passed | suites/password/password_test.py::test_password_delete_specific_password | 10.29 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.password_test:password_test.py:44 Running test_password_delete_specific_password... INFO tests.suites.password.password_test:password_test.py:65 test_password_delete_specific_password: PASSED | |||
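Deleting one specific userPassword value, as opposed to removing the attribute outright, is an ordinary MOD_DELETE that names the value to remove. A sketch (DN and value are illustrative, not the test's literals):

    import ldap
    conn.modify_s('uid=testuser,ou=People,dc=example,dc=com',
                  [(ldap.MOD_DELETE, 'userPassword', [b'password'])])
    # omitting the value list would delete every userPassword value instead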
Passed | suites/password/pbkdf2_upgrade_plugin_test.py::test_pbkdf2_upgrade | 19.96 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_bypass | 8.86 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:46 test_pwdAdmin_init: Creating Password Administrator entries... INFO tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:85 test_pwdAdmin_init: Configuring password policy... INFO tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:100 Add ACI to allow password admin to add/update entries... INFO tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:115 test_pwdAdmin_init: Bind as the Password Administrator (before activating)... INFO tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:128 test_pwdAdmin_init: Attempt to add entries with invalid passwords, these adds should fail... -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdAdmin_test:pwdAdmin_test.py:167 test_pwdAdmin: Activate the Password Administrator... |
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_no_admin | 0.17 | |
No log output captured. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_modify | 0.23 | |
No log output captured. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_group | 0.21 | |
No log output captured. | |||
Passed | suites/password/pwdAdmin_test.py::test_pwdAdmin_config_validation | 1.37 | |
No log output captured. | |||
Passed | suites/password/pwdModify_test.py::test_pwd_modify_with_different_operation | 17.26 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdModify_test:pwdModify_test.py:101 Attempt for Password change for an entry that does not exist INFO tests.suites.password.pwdModify_test:pwdModify_test.py:103 Attempt for Password change for an entry that exists INFO tests.suites.password.pwdModify_test:pwdModify_test.py:105 Attempt for Password change to old for an entry that exists INFO tests.suites.password.pwdModify_test:pwdModify_test.py:107 Attempt for Password Change with Binddn as testuser but with wrong old password INFO tests.suites.password.pwdModify_test:pwdModify_test.py:111 Attempt for Password Change with Binddn as testuser INFO tests.suites.password.pwdModify_test:pwdModify_test.py:113 Attempt for Password Change without giving newpassword INFO tests.suites.password.pwdModify_test:pwdModify_test.py:116 Change password to NEW_PASSWD i.e. newpassword INFO tests.suites.password.pwdModify_test:pwdModify_test.py:119 Check binding with old/new password INFO tests.suites.password.pwdModify_test:pwdModify_test.py:124 Change password back to OLD_PASSWD i.e. password INFO tests.suites.password.pwdModify_test:pwdModify_test.py:127 Checking password change Operation using a Non-Secure connection INFO tests.suites.password.pwdModify_test:pwdModify_test.py:131 Testuser attempts to change password for testuser2 (userPassword attribute is Set) INFO tests.suites.password.pwdModify_test:pwdModify_test.py:147 Directory Manager attempts to change password for testuser2 (userPassword attribute is Set) INFO tests.suites.password.pwdModify_test:pwdModify_test.py:150 Changing userPassword attribute to Undefined for testuser2 INFO tests.suites.password.pwdModify_test:pwdModify_test.py:152 Testuser attempts to change password for testuser2 (userPassword attribute is Undefined) INFO tests.suites.password.pwdModify_test:pwdModify_test.py:156 Directory Manager attempts to change password for testuser2 (userPassword attribute is Undefined) INFO tests.suites.password.pwdModify_test:pwdModify_test.py:159 Create a password syntax policy. Attempt to change to a password that violates that policy INFO tests.suites.password.pwdModify_test:pwdModify_test.py:163 Reset password syntax policy INFO tests.suites.password.pwdModify_test:pwdModify_test.py:165 userPassword mod with control results in ber decode error INFO tests.suites.password.pwdModify_test:pwdModify_test.py:169 Resetting the testuser's password |
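Most of the steps above go through the RFC 3062 password modify extended operation, which python-ldap exposes as passwd_s. A sketch of the argument shapes the log exercises (DNs and passwords are illustrative):

    # change as the user, old password supplied and verified
    conn.passwd_s('uid=testuser,ou=People,dc=example,dc=com',
                  'password', 'newpassword')
    # as Directory Manager, the old password may be omitted
    conn.passwd_s('uid=testuser2,ou=People,dc=example,dc=com',
                  None, 'newpassword')
    # omitting the new password asks the server to generate one per RFC 3062
    # (server support permitting); that is the 'without giving newpassword' step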
Passed | suites/password/pwdModify_test.py::test_pwd_modify_with_password_policy | 0.62 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdModify_test:pwdModify_test.py:44 Change the pwd storage type to clear and change the password once to refresh it (for the rest of the tests) -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdModify_test:pwdModify_test.py:192 Change the password and check that a new entry has been added to the history INFO tests.suites.password.pwdModify_test:pwdModify_test.py:196 Try changing password to one stored in history. Should fail INFO tests.suites.password.pwdModify_test:pwdModify_test.py:199 Change the password several times in a row, and try binding after each change |
Passed | suites/password/pwdModify_test.py::test_pwd_modify_with_subsuffix | 3.19 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdModify_test:pwdModify_test.py:228 Add a new SubSuffix INFO tests.suites.password.pwdModify_test:pwdModify_test.py:235 Add the container & create password policies INFO tests.suites.password.pwdModify_test:pwdModify_test.py:243 Add two New users under the SubEntry INFO tests.suites.password.pwdModify_test:pwdModify_test.py:265 Changing password of uid=test_user0,ou=TestPeople_bug834047,dc=example,dc=com to newpassword INFO tests.suites.password.pwdModify_test:pwdModify_test.py:270 Try to delete password- case when password is specified INFO tests.suites.password.pwdModify_test:pwdModify_test.py:274 Try to delete password- case when password is not specified | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_pwdReset_by_user_DM | 13.74 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:40 Adding test user {} -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:97 Set password policy passwordMustChange on INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:105 Binding as the Directory manager and attempt to change the pwdReset to FALSE INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:110 Check that pwdReset is NOT SET to FALSE INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:113 Resetting password for simplepaged_test | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_pwd_reset | 1.96 | |
No log output captured. | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-off-UNWILLING_TO_PERFORM] | 2.21 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:67 Create password policy for subtree ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:70 Create password policy for user uid=simplepaged_test,ou=people,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:199 Set passwordChange to "on" - ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:206 Set passwordChange to "off" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:215 Bind as user and modify userPassword INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:227 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-off-UNWILLING_TO_PERFORM] | 2.14 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:199 Set passwordChange to "off" - ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:206 Set passwordChange to "off" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:215 Bind as user and modify userPassword INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:227 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-on-False] | 2.16 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:199 Set passwordChange to "off" - ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:206 Set passwordChange to "on" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:215 Bind as user and modify userPassword INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:227 Bind as DM | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-on-False] | 2.17 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:199 Set passwordChange to "on" - ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:206 Set passwordChange to "on" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:215 Bind as user and modify userPassword INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:227 Bind as DM | |||
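The test_change_pwd matrix pits a subtree policy on ou=people against a user-level policy on the test entry; in every row the user-level passwordChange value decides whether the self-modify is allowed ("off" yields UNWILLING_TO_PERFORM). Fine-grained policies only take effect once local policy evaluation is switched on globally; a sketch, assuming a Directory Manager bind (lib389's policy helpers wrap the per-subtree and per-user policy subentries themselves, which are not shown here):

    conn.modify_s('cn=config',
                  [(ldap.MOD_REPLACE, 'nsslapd-pwpolicy-local', b'on')])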
Passed | suites/password/pwdPolicy_attribute_test.py::test_pwd_min_age | 14.33 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:264 Set passwordminage to "10" - ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:269 Set passwordminage to "10" - uid=simplepaged_test,ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:273 Set passwordminage to "10" - cn=config INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:278 Bind as user and modify userPassword INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:284 Bind as user and modify userPassword straight away after previous change INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:289 Wait 12 seconds INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:293 Bind as user and modify userPassword INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:301 Bind as DM |
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_maxuse_1 | 13.67 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:366 1st failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 1 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:366 2nd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 2 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:366 3rd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:366 4th failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 4 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:366 5th failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 5 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:377 last failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 5 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:391 failing bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 9 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 7 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 8 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 9 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 10 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 11 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 12 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 13 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 14 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 15 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:410 Rejected bind (CONSTRAINT_VIOLATION) => pwdTPRUseCount = 16 |
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_maxuse_2 | 12.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:480 1st failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 1 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:480 2nd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 2 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:480 3rd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:491 last failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 |
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_maxuse_3 | 10.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:587 1st failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 1 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:587 2nd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 2 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:587 3rd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:598 last failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 |
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_maxuse_4 | 9.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:709 1st failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 1 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:709 2nd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 2 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:709 3rd failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:720 last failing bind (INVALID_CREDENTIALS) => pwdTPRUseCount = 3 |
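The pwdTPRUseCount bookkeeping above belongs to the Temporary Password Rules added in 389-ds-base 2.x: with passwordMustChange on, a password set by an administrator becomes temporary, and each bind with it is counted until the configured maximum is reached. A sketch of the global knobs; only the pwdTPR* operational attributes appear in this log, so the passwordTPR* config attribute names here are an assumption:

    conn.modify_s('cn=config',
                  [(ldap.MOD_REPLACE, 'passwordMustChange', b'on'),
                   (ldap.MOD_REPLACE, 'passwordTPRMaxUse', b'5')])  # assumed name
    # once the count passes the maximum, binds are refused with
    # CONSTRAINT_VIOLATION until an administrator resets the password again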
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_delayValidFrom_1 | 5.05 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:798 compare pwdTPRValidFrom (20210605033506Z) vs now (time.struct_time(tm_year=2021, tm_mon=6, tm_mday=5, tm_hour=3, tm_min=34, tm_sec=57, tm_wday=5, tm_yday=156, tm_isdst=0)) | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_delayValidFrom_2 | 12.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:865 compare pwdTPRValidFrom (20210605033507Z) vs now (time.struct_time(tm_year=2021, tm_mon=6, tm_mday=5, tm_hour=3, tm_min=35, tm_sec=2, tm_wday=5, tm_yday=156, tm_isdst=0)) | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_delayValidFrom_3 | 12.02 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:957 compare pwdTPRValidFrom (20210605033519Z) vs now (time.struct_time(tm_year=2021, tm_mon=6, tm_mday=5, tm_hour=3, tm_min=35, tm_sec=14, tm_wday=5, tm_yday=156, tm_isdst=0)) | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_delayExpireAt_1 | 12.39 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:1038 compare pwdTPRExpireAt (20210605033531Z) vs now (time.struct_time(tm_year=2021, tm_mon=6, tm_mday=5, tm_hour=3, tm_min=35, tm_sec=26, tm_wday=5, tm_yday=156, tm_isdst=0)) | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_delayExpireAt_2 | 6.78 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:1108 compare pwdTPRExpireAt (20210605033544Z) vs now (time.struct_time(tm_year=2021, tm_mon=6, tm_mday=5, tm_hour=3, tm_min=35, tm_sec=39, tm_wday=5, tm_yday=156, tm_isdst=0)) | |||
Passed | suites/password/pwdPolicy_attribute_test.py::test_global_tpr_delayExpireAt_3 | 9.58 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:1202 compare pwdTPRExpireAt (20210605033551Z) vs now (time.struct_time(tm_year=2021, tm_mon=6, tm_mday=5, tm_hour=3, tm_min=35, tm_sec=45, tm_wday=5, tm_yday=156, tm_isdst=0)) -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_attribute_test:pwdPolicy_attribute_test.py:55 Deleting user uid=simplepaged_test,ou=people,dc=example,dc=com | |||
Passed | suites/password/pwdPolicy_controls_sequence_test.py::test_controltype_expired_grace_limit | 15.44 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:103 Configure password policy with grace limit set to 2 INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:108 Change password and wait for it to expire INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:76 Reset user password as the user, then re-bind as Directory Manager INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:112 Bind and use up one grace login (only one left) INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:58 Bind as the user, and return any controls INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:117 Bind again and check the sequence INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:58 Bind as the user, and return any controls INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:122 Bind with expired grace login and check the sequence INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:58 Bind as the user, and return any controls INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:67 Got expected error: {'msgtype': 97, 'msgid': 19, 'result': 49, 'desc': 'Invalid credentials', 'ctrls': [('1.3.6.1.4.1.42.2.27.8.5.1', 0, b'0\x84\x00\x00\x00\x03\x81\x01\x00'), ('2.16.840.1.113730.3.4.4', 0, b'0')], 'info': 'password expired!'} -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_controls_sequence_test:pwdPolicy_controls_sequence_test.py:50 Delete test user | |||
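These control-sequence checks bind with the password policy request control and read the response control back, both on successful grace logins and on the final failure ("password expired!", result 49, with the remaining-grace value riding on the error). A sketch of such a bind, assuming python-ldap decodes the known response control by default (bind DN illustrative; the port is the one from this report's setup lines):

    import ldap
    from ldap.controls.ppolicy import PasswordPolicyControl

    conn = ldap.initialize('ldap://localhost:38901')
    _, _, _, ctrls = conn.simple_bind_s(
        'uid=test_user,dc=example,dc=com', 'password',
        serverctrls=[PasswordPolicyControl()])
    for c in ctrls:
        if c.controlType == PasswordPolicyControl.controlType:
            # the suite asserts on exactly these fields
            print(c.graceAuthNsRemaining, c.timeBeforeExpiration, c.error)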
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_must_change | 10.46 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:109 Configure password policy with passwordMustChange set to "on" INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:120 Reset userPassword as Directory Manager INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:125 Bind should return ctrl with error code 2 (changeAfterReset) |
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_expired_grace_limit | 6.42 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:159 Configure password policy with grace limit set to 2 INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:164 Change password and wait for it to expire INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:168 Bind and use up one grace login (only one left) INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:178 Use up last grace login, should get control INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:184 No grace login available, bind should fail, and no control should be returned |
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_expiring_with_warning | 5.31 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:206 Configure password policy INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:211 Change password and get controls INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:222 Warning has been sent, try the bind again, and recheck the expiring time | |||
Passed | suites/password/pwdPolicy_controls_test.py::test_pwd_expiring_with_no_warning | 8.24 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:250 Configure password policy INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:255 When the warning is less than the max age, the expiring control response is never sent INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:262 Turn on sending expiring control regardless of warning INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:274 Check expiring time again INFO tests.suites.password.pwdPolicy_controls_test:pwdPolicy_controls_test.py:285 Turn off sending expiring control (restore the default setting) | |||
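The toggle exercised above is the cn=config attribute passwordSendExpiringTime: by default the expiring control is only returned once the password is inside the passwordWarning window. A sketch of the relevant settings, assuming a lib389 DirSrv handle `inst`; values mirror the report:

    inst.config.replace('passwordExp', 'on')               # enable password expiration
    inst.config.replace('passwordMaxAge', '172800')        # 2 days
    inst.config.replace('passwordWarning', '86400')        # warn only in the last day
    inst.config.replace('passwordSendExpiringTime', 'on')  # send the control even before the warning window
    # ... exercise binds here ...
    inst.config.replace('passwordSendExpiringTime', 'off') # restore the default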
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-off] | 10.33 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:39 Adding user uid=buser,ou=People,dc=example,dc=com INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:46 Adding an aci for the bind user INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:71 Enable fine-grained policy INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions. | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[on-off] | 1.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions. | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-on] | 1.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:119 Set nsslapd-pwpolicy-inherit-global to off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:120 Set passwordCheckSyntax to on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is off INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:129 Bind as test user INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:132 Make sure an entry added to ou=people has no password syntax restrictions. | |||
Passed | suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions | 2.79 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:173 Set nsslapd-pwpolicy-inherit-global to on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:174 Set passwordCheckSyntax to on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of nsslapd-pwpolicy-inherit-global is on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:92 Default value of passwordCheckSyntax is on INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:187 Bind as test user INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:192 Try to add user with a short password (<9) INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:197 Try to add user with a long password (>9) INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:201 Bind as DM user -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_inherit_global_test:pwdPolicy_inherit_global_test.py:53 Deleting user uid=buser,ou=People,dc=example,dc=com | |||
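Only the on/on combination above enforces the global syntax rules under the subtree policy; with nsslapd-pwpolicy-inherit-global off, a fine-grained policy that defines no syntax attributes performs no checking. A hedged sketch of that setup, assuming lib389's PwPolicyManager and an instance handle `inst`:

    from lib389.pwpolicy import PwPolicyManager

    inst.config.replace('nsslapd-pwpolicy-inherit-global', 'on')
    inst.config.replace('passwordCheckSyntax', 'on')
    inst.config.replace('passwordMinLength', '9')  # why the short (<9) password above is rejected

    # A subtree policy with no syntax attributes of its own; with
    # inherit-global on, the global rules above still apply beneath it.
    PwPolicyManager(inst).create_subtree_policy(
        'ou=people,dc=example,dc=com',
        {'passwordChange': 'on'},
    )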
Passed | suites/password/pwdPolicy_syntax_test.py::test_basic | 13.35 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:34 Enable global password policy. Check for syntax. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinLength: length too short INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinDigits: does not contain minimum number of digits INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinAlphas: does not contain minimum number of alphas INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxRepeats: too many repeating characters INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinSpecials: does not contain minimum number of special characters INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinLowers: does not contain minimum number of lowercase characters INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMinUppers: does not contain minimum number of uppercase characters INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordDictCheck: Password found in dictionary INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordPalindrome: Password is palindrome INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSequence: Max monotonic sequence is not allowed INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxSeqSets: Max monotonic sequence is not allowed INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordMaxClassChars: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry | |||
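Each rejection above corresponds to one cn=config syntax attribute. A sketch of enabling the checks named in the log, with illustrative values, assuming a lib389 DirSrv handle `inst`:

    inst.config.replace_many(
        ('passwordCheckSyntax', 'on'),
        ('passwordMinLength', '8'),
        ('passwordMinDigits', '1'),
        ('passwordMinAlphas', '1'),
        ('passwordMinUppers', '1'),
        ('passwordMinLowers', '1'),
        ('passwordMinSpecials', '1'),
        ('passwordMaxRepeats', '2'),
        ('passwordMaxSequence', '3'),
        ('passwordMaxSeqSets', '2'),
        ('passwordMaxClassChars', '3'),
        ('passwordDictCheck', 'on'),
        ('passwordPalindrome', 'on'),
    )
    # A non-compliant change of userPassword now fails with
    # CONSTRAINT_VIOLATION and an "invalid password syntax" message.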
Passed | suites/password/pwdPolicy_syntax_test.py::test_config_set_few_user_attributes | 6.75 | |
-------------------------------Captured log call-------------------------------- INFO lib389:pwdPolicy_syntax_test.py:316 Set passwordUserAttributes to "description loginShell" INFO lib389:pwdPolicy_syntax_test.py:320 Verify passwordUserAttributes has the values INFO lib389:pwdPolicy_syntax_test.py:324 Reset passwordUserAttributes INFO lib389:pwdPolicy_syntax_test.py:327 Verify passwordUserAttributes enforced the policy INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordUserAttributes: Password found in user entry | |||
Passed | suites/password/pwdPolicy_syntax_test.py::test_config_set_few_bad_words | 9.44 | |
-------------------------------Captured log call-------------------------------- INFO lib389:pwdPolicy_syntax_test.py:356 Set passwordBadWords to "fedora redhat" INFO lib389:pwdPolicy_syntax_test.py:361 Verify passwordBadWords has the values INFO lib389:pwdPolicy_syntax_test.py:365 Reset passwordBadWords INFO lib389:pwdPolicy_syntax_test.py:368 Verify passwordBadWords enforced the policy INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class INFO tests.suites.password.pwdPolicy_syntax_test:pwdPolicy_syntax_test.py:106 Invalid password correctly rejected by passwordBadWords: Too many consecutive characters from the same class | |||
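Both of the config tests above set a multi-valued cn=config attribute and then verify enforcement. A minimal sketch with an assumed lib389 DirSrv handle `inst`:

    inst.config.replace('passwordUserAttributes', 'description loginShell')
    inst.config.replace('passwordBadWords', 'fedora redhat')
    # A password containing a listed bad word, or a value of one of the
    # listed user attributes, is now rejected with CONSTRAINT_VIOLATION.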
Passed | suites/password/pwdPolicy_token_test.py::test_token_lengths | 13.28 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 4 token (test) INFO tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 12, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} INFO tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 6 token (test_u) INFO tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 16, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} INFO tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:67 Testing password len 10 token (test_user1) INFO tests.suites.password.pwdPolicy_token_test:pwdPolicy_token_test.py:72 Password correctly rejected: {'msgtype': 103, 'msgid': 20, 'result': 19, 'desc': 'Constraint violation', 'ctrls': [], 'info': 'invalid password syntax - password based off of user entry'} | |||
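The token checks above stem from passwordMinTokenLength: substrings of the entry's own attribute values (here the uid test_user1, truncated to 4, 6, and 10 characters) are refused as passwords. A python-ldap sketch of one rejection; the connection and DN are illustrative:

    import ldap

    # `conn` is an assumed python-ldap connection bound as the user.
    try:
        conn.passwd_s('uid=test_user1,ou=people,dc=example,dc=com', None, 'test')
    except ldap.CONSTRAINT_VIOLATION as e:
        # 'invalid password syntax - password based off of user entry'
        print('correctly rejected:', e)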
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[ ] | 8.42 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:209 An invalid value is being tested INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:213 Now check the value is unchanged INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:216 Invalid value was rejected correctly | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[junk123] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:209 An invalid value is being tested INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to junk123 INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:213 Now check the value is unchanged INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:216 Invalid value junk123 was rejected correctly | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[on] | 1.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:218 A valid value is being tested INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to on INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:221 Now check that the value has been changed INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:224 passwordSendExpiringTime is now set to on INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:226 Set passwordSendExpiringTime back to the default value INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_different_values[off] | 1.33 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:205 Get the default value INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:218 A valid value is being tested INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:221 Now check that the value has been changed INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:224 passwordSendExpiringTime is now set to off INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:226 Set passwordSendExpiringTime back to the default value INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off | |||
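The four parametrized cases above verify that passwordSendExpiringTime accepts only boolean values and leaves the stored value untouched on a bad write. A sketch of the invalid-value path, assuming a lib389 DirSrv handle `inst`:

    import ldap

    default = inst.config.get_attr_val_utf8('passwordSendExpiringTime')
    try:
        inst.config.replace('passwordSendExpiringTime', 'junk123')  # not a boolean
    except ldap.LDAPError:
        pass  # the modify is rejected; the exact error class is server-defined
    assert inst.config.get_attr_val_utf8('passwordSendExpiringTime') == default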
Passed | suites/password/pwdPolicy_warning_test.py::test_expiry_time | 0.69 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:257 Get the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:258 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:262 Check whether the time is returned INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:265 user's password will expire in 172800 seconds INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:268 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:62 Reset the defaults | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordSendExpiringTime-off] | 1.18 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:300 Set configuration parameter INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordSendExpiringTime to off INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:303 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:307 Check the state of the control INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:309 Password Expiry warning time is not returned as passwordSendExpiringTime is set to off INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:315 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:62 Reset the defaults | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordWarning-3600] | 1.17 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:300 Set configuration parameter INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:171 Setting passwordWarning to 3600 INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:303 Binding with (uid=tuser,ou=people,dc=example,dc=com) and requesting password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:307 Check the state of the control INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:312 (uid=tuser,ou=people,dc=example,dc=com) password will expire in 172800 seconds INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:315 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:62 Reset the defaults | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_with_different_password_states | 0.73 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:348 Expire user's password by changing passwordExpirationTime timestamp INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:352 Old passwordExpirationTime: 20210607033752Z INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:355 New passwordExpirationTime: 20210507033752Z INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:358 Attempting to bind with user uid=tuser,ou=people,dc=example,dc=com and retrieve the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:362 Bind Failed, error: <ExceptionInfo INVALID_CREDENTIALS({'msgtype': 97, 'msgid': 73, 'result': 49, 'desc': 'Invalid credentials', 'ctrls': [('1.3.6.1.4.1.....8.5.1', 0, b'0\x84\x00\x00\x00\x03\x81\x01\x00'), ('2.16.840.1.113730.3.4.4', 0, b'0')], 'info': 'password expired!'}) tblen=10> INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:364 Rebinding as DM INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:367 Reverting user's passwordExpirationTime INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:371 Rebinding with uid=tuser,ou=people,dc=example,dc=com and retrieving the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:374 Check that the control is returned INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:377 user's password will expire in 172800 seconds INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:380 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:62 Reset the defaults | |||
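The expiry flip above is done by rewriting the user's passwordExpirationTime, a GeneralizedTime value (e.g. 20210507033752Z). A sketch of back-dating it by a month and restoring it, assuming a lib389 UserAccount handle `user`:

    from datetime import datetime, timedelta

    # Illustrative: back-date the expiration so the next bind fails with
    # 'password expired!' as in the log above.
    expired = (datetime.utcnow() - timedelta(days=30)).strftime('%Y%m%d%H%M%SZ')
    old = user.get_attr_val_utf8('passwordExpirationTime')
    user.replace('passwordExpirationTime', expired)
    # ... bind as the user here; expect INVALID_CREDENTIALS ...
    user.replace('passwordExpirationTime', old)  # revert, as the test does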
Passed | suites/password/pwdPolicy_warning_test.py::test_default_behavior | 0.68 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:84 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:90 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:406 Binding with uid=tuser,ou=people,dc=example,dc=com and requesting the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:410 Check that no control is returned INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:413 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:99 Reset the defaults | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_when_maxage_and_warning_are_the_same | 2.97 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:84 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:90 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:442 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:446 First change user's password to reset its password expiration time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:453 Binding with uid=tuser,ou=people,dc=example,dc=com and requesting the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:457 Check that control is returned even if passwordSendExpiringTime is set to off INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:461 user's password will expire in 86400 seconds INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:463 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:99 Reset the defaults | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_with_local_policy | 1.58 | |
-----------------------------Captured stdout setup------------------------------ Successfully created user password policy -------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:113 Add the user INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:141 Setting fine grained policy for user (uid=tuser,ou=people,dc=example,dc=com) -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:490 Attempting to get password expiry warning time for user uid=tuser,ou=people,dc=example,dc=com INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:158 Bind with the user and request the password expiry warning time INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:493 Check that the control is not returned INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:496 Password expiry warning time is not returned INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:498 Rebinding as DM -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:130 Remove the user entry INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:62 Reset the defaults | |||
Passed | suites/password/pwdPolicy_warning_test.py::test_search_shadowWarning_when_passwordWarning_is_lower | 0.67 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:46 Get the default values INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:53 Set the new values -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:530 Bind as cn=Directory Manager INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:533 Creating test user INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:538 Setting passwordWarning to smaller value than 86400 INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:541 Bind as test user INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:544 Check if attribute shadowWarning is present -----------------------------Captured log teardown------------------------------ INFO tests.suites.password.pwdPolicy_warning_test:pwdPolicy_warning_test.py:62 Reset the defaults | |||
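shadowWarning is computed for entries with the shadowAccount objectclass from the password policy, and the test above expects it once passwordWarning is lowered below 86400. A python-ldap sketch of the check; `user_conn` and the DN are illustrative:

    import ldap

    # `user_conn` is an assumed python-ldap connection bound as a user
    # that carries the shadowAccount objectclass.
    res = user_conn.search_s(
        'uid=test_user,ou=people,dc=example,dc=com',
        ldap.SCOPE_BASE,
        attrlist=['shadowWarning'],
    )
    assert 'shadowWarning' in res[0][1]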
Passed | suites/password/pwdPolicy_warning_test.py::test_password_expire_works | 2.37 | |
No log output captured. | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CLEAR] | 8.67 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test CLEAR PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test CRYPT PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-MD5] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test CRYPT-MD5 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-SHA256] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test CRYPT-SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[CRYPT-SHA512] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test CRYPT-SHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[MD5] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test MD5 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SHA PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA256] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA384] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SHA384 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SHA512] | 0.15 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SMD5] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SMD5 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SSHA PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA256] | 0.21 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SSHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA384] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SSHA384 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[SSHA512] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test SSHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[PBKDF2_SHA256] | 0.29 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test PBKDF2_SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[DEFAULT] | 0.29 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test DEFAULT PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[PBKDF2-SHA1] | 0.15 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test PBKDF2-SHA1 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[PBKDF2-SHA256] | 0.16 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test PBKDF2-SHA256 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[PBKDF2-SHA512] | 0.19 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test PBKDF2-SHA512 PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pwd_algo_test[GOST_YESCRYPT] | 0.29 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:151 Test GOST_YESCRYPT PASSED | |||
Passed | suites/password/pwd_algo_test.py::test_pbkdf2_algo | 3.54 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_algo_test:pwd_algo_test.py:183 Test PASSED | |||
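Each parametrized case above selects a password storage scheme via the cn=config attribute passwordStorageScheme and round-trips a bind through the resulting hash. A sketch over an illustrative subset of the schemes, with assumed lib389 handles `inst` and `user`:

    for scheme in ('SSHA512', 'PBKDF2-SHA512', 'GOST_YESCRYPT'):
        inst.config.replace('passwordStorageScheme', scheme)
        user.replace('userPassword', 'Secret123')  # stored hashed with `scheme`
        user.bind('Secret123').unbind_s()          # the hash must still verify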
Passed | suites/password/pwd_lockout_bypass_test.py::test_lockout_bypass | 10.33 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwd_log_test.py::test_hide_unhashed_pwd | 15.40 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwd_log_test:pwd_log_test.py:79 Test complete | |||
Passed | suites/password/pwd_upgrade_on_bind_test.py::test_password_hash_on_upgrade | 8.35 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwd_upgrade_on_bind_test.py::test_password_hash_on_upgrade_clearcrypt | 0.12 | |
No log output captured. | |||
Passed | suites/password/pwd_upgrade_on_bind_test.py::test_password_hash_on_upgrade_disable | 1.93 | |
No log output captured. | |||
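Upgrade-on-bind re-hashes a legacy password to the current default scheme the first time the user authenticates; in 389-ds it is governed by the cn=config switch nsslapd-enable-upgrade-hash (on by default). A hedged sketch with the same assumed handles as above:

    inst.config.replace('passwordStorageScheme', 'SSHA')           # write a legacy hash
    user.replace('userPassword', 'Secret123')
    inst.config.replace('passwordStorageScheme', 'PBKDF2-SHA512')  # raise the default
    user.bind('Secret123').unbind_s()                              # this bind triggers the upgrade
    # userPassword should now carry the stronger scheme's prefix.
    print(user.get_attr_val_utf8('userPassword').split('}')[0] + '}')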
Passed | suites/password/pwp_gracel_test.py::test_password_gracelimit_section | 22.97 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwp_history_test.py::test_history_is_not_overwritten | 13.13 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwp_history_test:pwp_history_test.py:77 Configured password policy. | |||
Passed | suites/password/pwp_history_test.py::test_basic | 9.69 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.pwp_history_test:pwp_history_test.py:152 Configured password policy. INFO tests.suites.password.pwp_history_test:pwp_history_test.py:170 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:201 Correct number of passwords found in history. INFO tests.suites.password.pwp_history_test:pwp_history_test.py:212 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:222 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:232 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:254 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:267 Configured passwordInHistory to 0. INFO tests.suites.password.pwp_history_test:pwp_history_test.py:283 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:299 Configured passwordInHistory to 2. INFO tests.suites.password.pwp_history_test:pwp_history_test.py:312 Password change correctly rejected INFO tests.suites.password.pwp_history_test:pwp_history_test.py:326 Test suite PASSED. | |||
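The repeated rejections above are the passwordHistory check: a self-service change may not reuse any of the last passwordInHistory values (administrative resets are exempt). A sketch with assumed lib389 handles, where the connection is rebound as the user itself:

    import ldap

    inst.config.replace_many(('passwordHistory', 'on'), ('passwordInHistory', '3'))
    user.rebind('Secret123')                         # continue as the user itself
    user.replace('userPassword', 'Secret123a')
    try:
        user.replace('userPassword', 'Secret123a')   # reuse within the last 3
    except ldap.CONSTRAINT_VIOLATION:
        print('Password change correctly rejected')  # as logged above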
Passed | suites/password/pwp_test.py::test_passwordchange_to_no | 8.71 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/pwp_test.py::test_password_check_syntax | 1.13 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_too_big_password | 0.76 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_pwminage | 3.24 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_invalid_credentials | 7.66 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_expiration_date | 1.30 | |
No log output captured. | |||
Passed | suites/password/pwp_test.py::test_passwordlockout | 5.09 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_local_password_policy | 8.82 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/password/regression_of_bugs_test.py::test_passwordexpirationtime_attribute | 3.55 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_admin_group_to_modify_password | 1.62 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_password_max_failure_should_lockout_password | 0.21 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_pwd_update_time_attribute | 3.27 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_password_track_update_time | 7.27 | |
No log output captured. | |||
Passed | suites/password/regression_of_bugs_test.py::test_signal_11 | 1.11 | |
No log output captured. | |||
Passed | suites/password/regression_test.py::test_pwp_local_unlock | 13.51 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.password.regression_test:regression_test.py:66 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to on INFO tests.suites.password.regression_test:regression_test.py:73 Configure subtree password policy for ou=people,dc=example,dc=com INFO tests.suites.password.regression_test:regression_test.py:96 Adding user uid=UIDpwtest1,ou=people,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:135 Verify user can bind... INFO tests.suites.password.regression_test:regression_test.py:138 Test passwordUnlock default - user should be able to reset password after lockout INFO tests.suites.password.regression_test:regression_test.py:149 Verify account is locked INFO tests.suites.password.regression_test:regression_test.py:153 Wait for lockout duration... INFO tests.suites.password.regression_test:regression_test.py:156 Check if user can now bind with correct password | |||
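The lockout-and-unlock flow above maps onto four cn=config attributes. A sketch of the cycle with assumed lib389 handles `inst` and `user`; the values are illustrative:

    import time
    import ldap

    inst.config.replace_many(
        ('passwordLockout', 'on'),
        ('passwordMaxFailure', '3'),
        ('passwordLockoutDuration', '10'),
        ('passwordUnlock', 'on'),
    )
    for _ in range(3):                      # exceed passwordMaxFailure
        try:
            user.bind('wrong-password')
        except ldap.INVALID_CREDENTIALS:
            pass
    try:
        user.bind('Secret123')              # account is now locked
    except ldap.CONSTRAINT_VIOLATION:
        time.sleep(10)                      # wait out passwordLockoutDuration
        user.bind('Secret123')              # lock released, bind succeeds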
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with CNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[SNpwtest1] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with SNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[UIDpwtest1] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with UIDpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[MAILpwtest1@redhat.com] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with MAILpwtest1@redhat.com | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[GNpwtest1] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with GNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZZZ] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with CNpwtest1ZZZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZZZCNpwtest1] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZZZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZCNpwtest1] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1Z] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with CNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZCNpwtest1Z] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZCNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZCNpwtest1] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZ] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with CNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZCNpwtest1ZZ] | 0.15 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZZCNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZCNpwtest1] | 0.14 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[CNpwtest1ZZZ] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with CNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZCNpwtest1ZZZ] | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZZZCNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_trivial_passw_check[ZZZZZZCNpwtest1ZZZZZZZZ] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:181 Replace userPassword attribute with ZZZZZZCNpwtest1ZZZZZZZZ | |||
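Every parametrized value above is either a value of one of the user's own attributes (cn, sn, uid, mail, givenName) or a string containing one, so with passwordCheckSyntax on the change is rejected as trivial. A short sketch with assumed handles:

    import ldap

    # `user` is an assumed lib389 UserAccount whose cn is 'CNpwtest1'.
    for pw in ('CNpwtest1', 'ZZZCNpwtest1ZZZ'):
        try:
            user.replace('userPassword', pw)
        except ldap.CONSTRAINT_VIOLATION:
            print('trivial password correctly rejected:', pw)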
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with CNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[SNpwtest1] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with SNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[UIDpwtest1] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with UIDpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[MAILpwtest1@redhat.com] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with MAILpwtest1@redhat.com | |||
Passed | suites/password/regression_test.py::test_global_vs_local[GNpwtest1] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with GNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with CNpwtest1ZZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZZZCNpwtest1] | 0.19 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZZZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZCNpwtest1] | 0.17 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1Z] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with CNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZCNpwtest1Z] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZCNpwtest1Z | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZCNpwtest1] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with CNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZCNpwtest1ZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZZCNpwtest1ZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZCNpwtest1] | 0.20 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZZZCNpwtest1 | |||
Passed | suites/password/regression_test.py::test_global_vs_local[CNpwtest1ZZZ] | 0.19 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with CNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZCNpwtest1ZZZ] | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZZZCNpwtest1ZZZ | |||
Passed | suites/password/regression_test.py::test_global_vs_local[ZZZZZZCNpwtest1ZZZZZZZZ] | 0.20 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.password.regression_test:regression_test.py:209 Configure Pwpolicy with PasswordCheckSyntax and nsslapd-pwpolicy-local set to off INFO tests.suites.password.regression_test:regression_test.py:214 Replace userPassword attribute with ZZZZZZCNpwtest1ZZZZZZZZ | |||
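Editor's note: each test_global_vs_local run above first flips the same two global knobs named in the log, then attempts the password change. A sketch of that setup, assuming lib389 against the standalone instance this report uses (the bind password here is a placeholder):
from lib389 import DirSrv

inst = DirSrv(verbose=False)
inst.local_simple_allocate(serverid='standalone1', password='password')  # placeholder credentials
inst.open()

inst.config.replace('passwordCheckSyntax', 'on')      # "PasswordCheckSyntax" in the log
inst.config.replace('nsslapd-pwpolicy-local', 'off')  # ignore local policies; the global policy decides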
Passed | suites/plugins/acceptance_test.py::test_acctpolicy | 31.07 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/acceptance_test.py::test_attruniq | 17.40 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_automember | 24.51 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_dna | 17.33 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_linkedattrs | 24.48 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_memberof | 34.35 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_mep | 17.27 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_passthru | 22.03 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_referint | 12.76 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_retrocl | 22.02 | |
No log output captured. | |||
Passed | suites/plugins/acceptance_test.py::test_rootdn | 38.06 | |
No log output captured. | |||
Passed | suites/plugins/accpol_test.py::test_glact_inact | 40.88 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.utils:accpol_test.py:35 Configuring Global account policy plugin, pwpolicy attributes and restarting the server -------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:348 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:351 Sleep for 10 secs to check if account is not inactivated, expected value 0 INFO lib389.utils:accpol_test.py:353 Account should not be inactivated since AccountInactivityLimit not exceeded INFO lib389.utils:accpol_test.py:356 Sleep for 3 more secs to check if account is inactivated INFO lib389.utils:accpol_test.py:360 Sleep +10 secs to check if account glinactusr3 is inactivated INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
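Editor's note: the accpol_test rows run against the plugin state the setup line above describes: the Account Policy plugin enabled globally with a 12-second inactivity limit. A hedged sketch of that configuration, assuming a connected instance `inst` and that the plugin's shared config entry already exists (attribute names follow the plugin's documented configuration; the limit value comes from the log):
from lib389.plugins import AccountPolicyPlugin, AccountPolicyConfig

plugin = AccountPolicyPlugin(inst)
plugin.enable()

config = AccountPolicyConfig(inst, 'cn=config,cn=Account Policy Plugin,cn=plugins,cn=config')
config.set('alwaysrecordlogin', 'yes')             # stamp lastLoginTime on each bind
config.set('stateattrname', 'lastLoginTime')       # primary inactivity state attribute
config.set('altstateattrname', 'createTimestamp')  # fallback before the first login
config.set('limitattrname', 'accountInactivityLimit')
config.set('accountInactivityLimit', '12')         # seconds, per the log above
inst.restart()                                     # plugin config is read at startup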
Passed | suites/plugins/accpol_test.py::test_glremv_lastlogin | 19.16 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:394 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:396 Sleep for 6 secs to check if account is not inactivated, expected value 0 INFO lib389.utils:accpol_test.py:398 Account should not be inactivated since AccountInactivityLimit not exceeded INFO lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account INFO lib389.utils:accpol_test.py:401 Sleep for 7 more secs to check if account is inactivated INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:405 Check if account is activated, expected 0 INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glact_login | 23.61 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:434 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:436 Sleep for 13 secs to check if account is inactivated, expected error 19 INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:440 Check if account is activated, expected 0 INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glinact_limit | 122.70 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:492 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:494 Sleep for 9 secs to check if account is not inactivated, expected 0 INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:516 Check if account is activated, expected 0 INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnologin_attr | 85.83 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:576 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:577 Set attribute StateAttrName to createTimestamp, loginTime attr won't be considered INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:581 Sleep for 9 secs to check if account is not inactivated, expected 0 INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:606 Set attribute StateAttrName to lastLoginTime, the default INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:610 Check if account is activated, expected 0 INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnoalt_stattr | 56.21 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:642 Set attribute altStateAttrName to 1.1 INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:646 Sleep for 13 secs to check if account is not inactivated, expected 0 INFO lib389.utils:accpol_test.py:649 lastLoginTime attribute is added from the above ldap bind by userdn INFO lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glattr_modtime | 45.30 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:691 Set attribute altStateAttrName to modifyTimestamp INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:695 Sleep for 13 secs to check if account is inactivated, expected 0 INFO lib389.utils:accpol_test.py:202 Check ModifyTimeStamp attribute present for user INFO lib389.utils:accpol_test.py:237 Enable account by replacing cn attribute value, value of modifyTimeStamp changed INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:254 Delete lastLoginTime/createTimeStamp/ModifyTimeStamp attribute from user account INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnoalt_nologin | 51.35 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:748 Set attribute altStateAttrName to 1.1 INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:750 Set attribute alwaysrecordlogin to No INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:754 Sleep for 13 secs to check if account is not inactivated, expected 0 INFO lib389.utils:accpol_test.py:759 Set attribute altStateAttrName to createTimestamp INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:764 Reset the default attribute values INFO lib389.utils:accpol_test.py:189 Modify attribute value for a given DN INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glinact_nsact | 22.62 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:807 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:809 Sleep for 3 secs to check if account is not inactivated, expected value 0 INFO lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO lib389.utils:accpol_test.py:166 Running unlock for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'unlock', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:180 output: b'Error: Account is already active\n' INFO lib389.utils:accpol_test.py:812 Sleep for 10 secs to check if account is inactivated, expected value 19 INFO lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO lib389.utils:accpol_test.py:166 Running unlock for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'unlock', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:180 output: b'Error: 103 - 22 - 16 - No such attribute - []\n' INFO lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO lib389.utils:accpol_test.py:166 Running entry-status for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:180 output: b'Entry DN: uid=nsactusr1,ou=groups,dc=example,dc=com\nEntry Creation Date: 20210605035321Z (2021-06-05 03:53:21)\nEntry Modification Date: 20210605035321Z (2021-06-05 03:53:21)\nEntry Last Login Date: 20210605035321Z (2021-06-05 03:53:21)\nEntry Time Since Inactive: 5 seconds (2021-06-05 04:53:33)\nEntry State: inactivity limit exceeded\n\n' INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:162 Account activate/in-activate/status using dsidm INFO lib389.utils:accpol_test.py:166 Running entry-status for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:174 Running ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com', 'account', 'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'] for user uid=nsactusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:180 output: b'Entry DN: uid=nsactusr1,ou=groups,dc=example,dc=com\nEntry Creation Date: 20210605035321Z (2021-06-05 03:53:21)\nEntry Modification Date: 20210605035339Z (2021-06-05 03:53:39)\nEntry Last Login Date: 20210605035341Z (2021-06-05 03:53:41)\nEntry Time Until Inactive: 11 seconds (2021-06-05 04:53:53)\nEntry State: activated\n\n' INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
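Editor's note: the dsidm invocations recorded above are plain CLI calls and can be replayed as-is; the argv below is copied from the captured log (the `Running [...]` lines show the test driving dsidm through a subprocess in the same way):
import subprocess

proc = subprocess.run(
    ['/usr/sbin/dsidm', 'slapd-standalone1', '-b', 'dc=example,dc=com',
     'account', 'entry-status', 'uid=nsactusr1,ou=groups,dc=example,dc=com'],
    capture_output=True)
print(proc.stdout.decode())  # ends with e.g. 'Entry State: activated'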
Passed | suites/plugins/accpol_test.py::test_glinact_acclock | 40.34 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:854 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:856 Sleep for 3 secs and try invalid binds to lockout the user INFO lib389.utils:accpol_test.py:118 Lockout user account by attempting invalid password binds INFO lib389.utils:accpol_test.py:860 Sleep for 10 secs to check if account is inactivated, expected value 19 INFO lib389.utils:accpol_test.py:864 Add lastLoginTime to activate the user account INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:868 Checking if account is unlocked after passwordlockoutduration, but inactivated after accountInactivityLimit INFO lib389.utils:accpol_test.py:118 Lockout user account by attempting invalid password binds INFO lib389.utils:accpol_test.py:872 Account is expected to be unlocked after 5 secs of passwordlockoutduration INFO lib389.utils:accpol_test.py:876 Sleep 13s and check if account inactivated based on accountInactivityLimit, expected 19 INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_glnact_pwexp | 49.37 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:921 AccountInactivityLimit set to 12. Account will be inactivated if not accessed in 12 secs INFO lib389.utils:accpol_test.py:922 Passwordmaxage is set to 9. Password will expire in 9 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:925 Sleep for 9 secs and check if password expired INFO lib389.utils:accpol_test.py:931 Add lastLoginTime to activate the user account INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:942 Sleep for 4 secs and check if account is now inactivated, expected error 19 INFO lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:141 Reset user password for user-uid=pwexpusr1,ou=groups,dc=example,dc=com INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_locact_inact | 36.16 | |
-------------------------------Captured log setup------------------------------- INFO lib389.utils:accpol_test.py:80 Adding Local account policy plugin configuration entries -------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:995 AccountInactivityLimit set to 10. Account will be inactivated if not accessed in 10 secs INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:997 Sleep for 9 secs to check if account is not inactivated, expected value 0 INFO lib389.utils:accpol_test.py:999 Account should not be inactivated since AccountInactivityLimit not exceeded INFO lib389.utils:accpol_test.py:1001 Sleep for 2 more secs to check if account is inactivated INFO lib389.utils:accpol_test.py:1004 Sleep +9 secs to check if account inactusr3 is inactivated INFO lib389.utils:accpol_test.py:1007 Add lastLoginTime attribute to all users and check if it's activated INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_locinact_modrdn | 27.16 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:1043 Account should not be inactivated since the subtree is not configured INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:1045 Sleep for 11 secs to check if account is not inactivated, expected value 0 INFO lib389.utils:accpol_test.py:1048 Moving users from ou=groups to ou=people subtree INFO lib389.utils:accpol_test.py:1056 Then wait for 11 secs and check if entries are inactivated INFO lib389.utils:accpol_test.py:219 Enable account by replacing lastLoginTime/createTimeStamp/ModifyTimeStamp attribute INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs | |||
Passed | suites/plugins/accpol_test.py::test_locact_modrdn | 25.50 | |
-------------------------------Captured log call-------------------------------- INFO lib389.utils:accpol_test.py:1088 Account should be inactivated since the subtree is configured INFO lib389.utils:accpol_test.py:271 add_users: Pass all of these as parameters suffix, subtree, userid and nousrs INFO lib389.utils:accpol_test.py:1090 Sleep for 11 secs to check if account is inactivated, expected value 19 INFO lib389.utils:accpol_test.py:1093 Moving users from ou=people to ou=groups subtree INFO lib389.utils:accpol_test.py:1100 Sleep for +2 secs and check users from both ou=people and ou=groups subtree INFO lib389.utils:accpol_test.py:290 del_users: Pass all of these as parameters suffix, subtree, userid and nousrs -----------------------------Captured log teardown------------------------------ INFO lib389.utils:accpol_test.py:101 Disabling Local accpolicy plugin and removing pwpolicy attrs INFO lib389.utils:accpol_test.py:62 Disabling Global accpolicy plugin and removing pwpolicy attrs | |||
Passed | suites/plugins/attr_nsslapd-pluginarg_test.py::test_duplicate_values | 11.40 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:31 Ticket 47431 - 0: Enable 7bit plugin... -------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:56 Ticket 47431 - 1: Check 26 duplicate values are treated as one... DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:59 modify_s cn=7-bit check,cn=plugins,cn=config DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:83 line: [04/Jun/2021:23:56:52.946338743 -0400] - WARN - str2entry_dupcheck - 26 duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config. Extra values ignored. INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:84 Expected error "str2entry_dupcheck.* duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config." logged in /var/log/dirsrv/slapd-standalone1/errors INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:86 Ticket 47431 - 1: done | |||
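Editor's note: the pass criterion above is a regex search over the instance error log. lib389 exposes that log on the instance object; a sketch assuming a connected instance `inst`, with the pattern condensed from the one quoted in the captured output:
# ds_error_log.match() runs a regex over every line of the errors log and
# returns the matching lines, so a non-empty result means the warning fired.
assert inst.ds_error_log.match(
    '.*str2entry_dupcheck.*duplicate values for attribute type '
    'nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config.*')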
Passed | suites/plugins/attr_nsslapd-pluginarg_test.py::test_multiple_value | 5.43 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:111 Ticket 47431 - 2: Check two values belonging to one arg is fixed... DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [04/Jun/2021:23:56:57.518763403 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 0: uid DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[0] uid DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 uid was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [04/Jun/2021:23:56:57.523561419 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 1: mail DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[1] mail DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 mail was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [04/Jun/2021:23:56:57.525893258 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 2: userpassword DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[2] userpassword DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 userpassword was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [04/Jun/2021:23:56:57.527929678 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 3: , DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[3] , DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 , was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:131 line - [04/Jun/2021:23:56:57.530593581 -0400] - DEBUG - NS7bitAttr - NS7bitAttr_Init - 4: dc=example,dc=com DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:132 ATTRS[4] dc=example,dc=com DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:136 dc=example,dc=com was logged INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:142 Ticket 47431 - 2: done | |||
Passed | suites/plugins/attr_nsslapd-pluginarg_test.py::test_missing_args | 6.33 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:170 Ticket 47431 - 3: Check missing args are fixed... DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 uid was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 mail was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 userpassword was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 , was logged DEBUG tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:197 dc=example,dc=com was logged INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:203 Ticket 47431 - 3: done INFO tests.suites.plugins.attr_nsslapd-pluginarg_test:attr_nsslapd-pluginarg_test.py:204 Test complete | |||
Passed | suites/plugins/cos_test.py::test_cos_operational_default | 14.82 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- CRITICAL lib389.utils:cos_test.py:27 Adding user (uid=user_0,ou=people,dc=example,dc=com): INFO lib389.utils:cos_test.py:153 Returned telephonenumber (exp. real): b'1234 is real' INFO lib389.utils:cos_test.py:154 Returned telephonenumber: 8 INFO lib389.utils:cos_test.py:160 Returned l (exp. real): b'here is real' INFO lib389.utils:cos_test.py:161 Returned l: 8 INFO lib389.utils:cos_test.py:170 Returned seealso (exp. virtual): b'dc=virtual,dc=example,dc=com' INFO lib389.utils:cos_test.py:171 Returned seealso: 3 INFO lib389.utils:cos_test.py:180 Returned description (exp. virtual): b'desc is virtual' INFO lib389.utils:cos_test.py:181 Returned description: 8 INFO lib389.utils:cos_test.py:191 Returned title (exp. real): b'title is real' INFO lib389.utils:cos_test.py:212 Returned title(exp. virt): b'title is virtual 1' INFO lib389.utils:cos_test.py:212 Returned title(exp. virt): b'title is virtual 0' | |||
Passed | suites/plugins/deref_aci_test.py::test_deref_and_access_control | 10.36 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.deref_aci_test:deref_aci_test.py:133 Check that the dereference search result does not have userpassword | |||
Passed | suites/plugins/dna_test.py::test_dnatype_only_valid | 14.87 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/entryusn_test.py::test_entryusn_no_duplicates | 18.02 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/entryusn_test.py::test_entryusn_is_same_after_failure | 5.23 | |
No log output captured. | |||
Passed | suites/plugins/entryusn_test.py::test_entryusn_after_repl_delete | 37.18 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bf880142-d249-4d99-a926-76ec1e2e80f6 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 27d246c5-72bf-4fd6-a47e-efc5a4e6a990 / got description=bf880142-d249-4d99-a926-76ec1e2e80f6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
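Editor's note: the setup log above shows lib389 building a two-supplier topology: install both instances, create the agreements, then retry until replication is confirmed in both directions. A sketch of the same construction with the helpers this report already uses (ReplicaRole.SUPPLIER and the topo.ms keys follow the 2.x lib389 naming visible in these logs):
from lib389.topologies import create_topology
from lib389.replica import ReplicationManager
from lib389._constants import ReplicaRole, DEFAULT_SUFFIX

topo = create_topology({ReplicaRole.SUPPLIER: 2})  # installs supplier1/supplier2 and joins them
s1 = topo.ms['supplier1']
s2 = topo.ms['supplier2']

# The Retry/SUCCESS lines above are this check: write a marker value on one
# side and poll the other side until it arrives.
repl = ReplicationManager(DEFAULT_SUFFIX)
repl.wait_for_replication(s1, s2)
repl.wait_for_replication(s2, s1)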
Passed | suites/plugins/managed_entry_test.py::test_binddn_tracking | 10.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/managed_entry_test.py::test_managed_entry_removal | 7.61 | |
No log output captured. | |||
Passed | suites/plugins/memberof_test.py::test_betxnpostoperation_replace | 13.31 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/plugins/memberof_test.py::test_memberofgroupattr_add | 0.25 | |
No log output captured. | |||
Passed | suites/plugins/memberof_test.py::test_enable | 4.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:181 Enable MemberOf plugin | |||
Passed | suites/plugins/memberof_test.py::test_member_add | 0.15 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofenh1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofenh2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:211 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) INFO tests.suites.plugins.memberof_test:memberof_test.py:212 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) INFO tests.suites.plugins.memberof_test:memberof_test.py:215 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) INFO tests.suites.plugins.memberof_test:memberof_test.py:216 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (uniqueMember) INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
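Editor's note: the memberof_test rows follow one cycle: enable the plugin, add a member attribute on a group, and verify the plugin wrote the corresponding memberOf value back on the user entry. A minimal sketch of that cycle, assuming a connected instance `inst` (not the suite's exact code; the group cn is borrowed from the log):
from lib389.plugins import MemberOfPlugin
from lib389.idm.user import UserAccounts
from lib389.idm.group import Groups
from lib389._constants import DEFAULT_SUFFIX

MemberOfPlugin(inst).enable()
inst.restart()  # plugin enablement takes effect after a restart

user = UserAccounts(inst, DEFAULT_SUFFIX).create_test_user(uid=1001)
group = Groups(inst, DEFAULT_SUFFIX).create(properties={'cn': 'group_memofegrp1'})
group.add_member(user.dn)  # the betxnpostoperation plugin updates the user

# memberOf now points back at the group
assert user.present('memberOf', group.dn)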
Passed | suites/plugins/memberof_test.py::test_member_delete_gr1 | 0.32 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:246 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_member_delete_gr2 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:278 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com | |||
Passed | suites/plugins/memberof_test.py::test_member_delete_all | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:310 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (uniqueMember) INFO tests.suites.plugins.memberof_test:memberof_test.py:314 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' | |||
Passed | suites/plugins/memberof_test.py::test_member_after_restart | 8.63 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:349 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) INFO tests.suites.plugins.memberof_test:memberof_test.py:353 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (uniqueMember) INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:364 Remove uniqueMember as a memberofgrpattr INFO tests.suites.plugins.memberof_test:memberof_test.py:371 Assert that this change of configuration did change the already set values INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_memberofgroupattr_uid | 0.08 | |
-------------------------------Captured log call-------------------------------- ERROR tests.suites.plugins.memberof_test:memberof_test.py:400 Setting 'memberUid' as memberofgroupattr is rejected (expected) | |||
Passed | suites/plugins/memberof_test.py::test_member_add_duplicate_usr1 | 0.27 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:422 Try b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' (member) ERROR tests.suites.plugins.memberof_test:memberof_test.py:429 b'uid=user_memofenh1,ou=people,dc=example,dc=com' already member of b'cn=group_memofegrp1,ou=groups,dc=example,dc=com' --> fail (expected) | |||
Passed | suites/plugins/memberof_test.py::test_member_add_duplicate_usr2 | 0.59 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:450 Check initial status INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:460 Try b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' (member) ERROR tests.suites.plugins.memberof_test:memberof_test.py:467 b'uid=user_memofenh2,ou=people,dc=example,dc=com' already member of b'cn=group_memofegrp2,ou=groups,dc=example,dc=com' --> fail (expected) INFO tests.suites.plugins.memberof_test:memberof_test.py:470 Check final status INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
Passed | suites/plugins/memberof_test.py::test_member_uniquemember_same_user | 0.11 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.memberof_test:memberof_test.py:557 Check initial status
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:569 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:570 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:582 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is not memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:586 Update b'uid=user_memofenh2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp3,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:613 Checking final status
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
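The log above exercises one user referenced by both member and uniqueMember in the same group (memberof_test.py:569/570): the memberOf plugin must collapse the two references into a single memberOf value, and removing one reference (:582) must not retract memberOf while the other remains. A minimal python-ldap sketch of that shape, not the suite's own code; the URI and Directory Manager credentials are assumptions, the DNs mirror the log:

import ldap

GROUP = 'cn=group_memofegrp3,ou=groups,dc=example,dc=com'
USER = 'uid=user_memofenh1,ou=people,dc=example,dc=com'

conn = ldap.initialize('ldap://localhost:389')            # assumption: local test instance
conn.simple_bind_s('cn=Directory Manager', 'password')    # assumed credentials

# Reference the same user through both membership attributes; the group entry
# is assumed to carry object classes allowing both (groupOfNames and
# groupOfUniqueNames).
conn.modify_s(GROUP, [
    (ldap.MOD_ADD, 'member', USER.encode()),
    (ldap.MOD_ADD, 'uniqueMember', USER.encode()),
])

# The plugin stamps memberOf on the user; the group must appear exactly once.
dn, attrs = conn.search_s(USER, ldap.SCOPE_BASE, '(objectClass=*)', ['memberOf'])[0]
assert attrs['memberOf'].count(GROUP.encode()) == 1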
Passed | suites/plugins/memberof_test.py::test_member_not_exists | 0.10 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.memberof_test:memberof_test.py:671 Checking Initial status
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp015,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:691 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp015,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:692 Update b'uid=user_dummy2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp015,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
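Here the member and uniqueMember values added at :691/:692 point at uid=user_dummy1/uid=user_dummy2 entries that were never created: the group update is accepted, the existing users' memberOf sets are untouched, and there is no dummy entry for the plugin to decorate. A sketch of that behaviour under the same assumed connection details as the earlier snippet (not the suite's own code):

import ldap

GROUP = 'cn=group_memofegrp015,ou=groups,dc=example,dc=com'
GHOST = 'uid=user_dummy1,ou=people,dc=example,dc=com'     # never created

conn = ldap.initialize('ldap://localhost:389')            # assumption
conn.simple_bind_s('cn=Directory Manager', 'password')    # assumed credentials

# A member value whose target entry does not exist is accepted here, so the
# modify succeeds ...
conn.modify_s(GROUP, [(ldap.MOD_ADD, 'member', GHOST.encode())])

# ... but there is no entry for the plugin to stamp with memberOf:
try:
    conn.search_s(GHOST, ldap.SCOPE_BASE)
except ldap.NO_SUCH_OBJECT:
    pass   # expected: the dummy user is absent from the directory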
Passed | suites/plugins/memberof_test.py::test_member_not_exists_complex | 0.11 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:806 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:807 Update b'uid=user_memofenh1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:835 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:845 Update b'uid=user_dummy1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp016,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
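The repeated ":109 Lookup … / :116 memberof: … / :119 --> membership verified" triplets throughout these logs come from the suite's verification helper (around memberof_test.py:109-119), which reads a user's memberOf values and compares them with the expected groups. An illustrative equivalent of that helper, not the suite's own code:

import ldap

def memberof_values(conn, user_dn):
    """Return the set of memberOf DNs present on user_dn (empty if none)."""
    dn, attrs = conn.search_s(user_dn, ldap.SCOPE_BASE,
                              '(objectClass=*)', ['memberOf'])[0]
    return {v.decode() for v in attrs.get('memberOf', [])}

def assert_membership(conn, user_dn, expected):
    """Fail unless user_dn's memberOf set matches exactly the expected DNs."""
    got = memberof_values(conn, user_dn)
    assert got == set(expected), f'{user_dn}: {got} != {set(expected)}'

With the DNs from this log, the final state of user_memofenh1 would be checked as assert_membership(conn, 'uid=user_memofenh1,ou=people,dc=example,dc=com', {'cn=group_memofegrp1,ou=groups,dc=example,dc=com', 'cn=group_memofegrp3,ou=groups,dc=example,dc=com', 'cn=group_memofegrp016,ou=groups,dc=example,dc=com'}).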
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_1 | 0.13 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:1011 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:1012 Update b'uid=user_memofuser2,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:1013 Update b'uid=user_memofuser3,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (memberuid)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
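Note the six ":109" lookups for user_memofuser3 at the end with no ":116 memberof:" lines after them: that user was referenced only through memberuid (:1013), and memberUid is a plain string attribute from the posixGroup schema rather than one of the DN-valued membership attributes the memberOf plugin tracks by default, so no memberOf value is generated. A sketch of that check; the bare-uid value and connection details are assumptions (the log does not print the stored value):

import ldap

GROUP = 'cn=group_memofegrp017,ou=groups,dc=example,dc=com'
USER = 'uid=user_memofuser3,ou=people,dc=example,dc=com'

conn = ldap.initialize('ldap://localhost:389')            # assumption
conn.simple_bind_s('cn=Directory Manager', 'password')    # assumed credentials

# memberUid takes a string, not a DN; the group entry is assumed to allow the
# attribute (e.g. via posixGroup). The plugin ignores it by default.
conn.modify_s(GROUP, [(ldap.MOD_ADD, 'memberUid', b'user_memofuser3')])

dn, attrs = conn.search_s(USER, ldap.SCOPE_BASE, '(objectClass=*)', ['memberOf'])[0]
assert 'memberOf' not in attrs   # no memberOf was generated for this user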
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_2 | 0.13 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:1261 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:1262 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:1263 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is memberof b'cn=group_memofegrp017,ou=groups,dc=example,dc=com' (memberuid)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp018,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:1283 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp018,ou=groups,dc=example,dc=com' (member)
INFO tests.suites.plugins.memberof_test:memberof_test.py:1284 Update b'uid=user_memofuser1,ou=people,dc=example,dc=com' is no longer memberof b'cn=group_memofegrp018,ou=groups,dc=example,dc=com' (uniqueMember)
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp017,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
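The ":1283/:1284" lines record the reverse operation: both the member and uniqueMember references to user_memofuser1 are deleted from group_memofegrp018, after which the user's memberOf shrinks back to group_memofegrp017 alone, as the subsequent lookups confirm. A sketch of that step with python-ldap, under the same assumed connection details as the earlier snippets:

import ldap

GROUP18 = 'cn=group_memofegrp018,ou=groups,dc=example,dc=com'
GROUP17 = 'cn=group_memofegrp017,ou=groups,dc=example,dc=com'
USER = 'uid=user_memofuser1,ou=people,dc=example,dc=com'

conn = ldap.initialize('ldap://localhost:389')            # assumption
conn.simple_bind_s('cn=Directory Manager', 'password')    # assumed credentials

# Remove both references; the plugin then retracts the memberOf value it had
# written for this group.
conn.modify_s(GROUP18, [
    (ldap.MOD_DELETE, 'member', USER.encode()),
    (ldap.MOD_DELETE, 'uniqueMember', USER.encode()),
])

# Only the membership in group_memofegrp017 should remain.
dn, attrs = conn.search_s(USER, ldap.SCOPE_BASE, '(objectClass=*)', ['memberOf'])[0]
assert attrs['memberOf'] == [GROUP17.encode()]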
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_3 | 0.25 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 
Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp019_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp019_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
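In the scenario_3 log above, group_memofegrp019_2 and group_memofegrp019_3 report memberOf values pointing at group_memofegrp019_1, i.e. groups nested inside another group. A sketch of how such nesting can be built with python-ldap (the objectClasses and attribute values are illustrative assumptions; the actual fixtures in memberof_test.py are not shown in this report):

import ldap
import ldap.modlist

conn = ldap.initialize("ldap://localhost:389")
conn.simple_bind_s("cn=Directory Manager", "password")

parent = "cn=group_memofegrp019_1,ou=groups,dc=example,dc=com"
child = "cn=group_memofegrp019_2,ou=groups,dc=example,dc=com"

# Create the child group with one user member (assumed schema).
conn.add_s(child, ldap.modlist.addModlist({
    "objectClass": [b"top", b"groupOfNames"],
    "cn": [b"group_memofegrp019_2"],
    "member": [b"uid=user_memofuser2,ou=people,dc=example,dc=com"],
}))

# Nest it: make the child a member of the (already existing) parent.
# The memberOf plugin then writes memberOf=<parent> onto the child,
# which is what the Lookup lines above observe.
conn.modify_s(parent, [(ldap.MOD_ADD, "member", [child.encode("utf-8")])])
conn.unbind_s()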
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_4 | 0.23 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
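Each "--> membership verified" line marks a point where the observed memberOf set matched the expected one. A compact way to express that check with python-ldap (the helper name is hypothetical; connection setup as in the earlier sketch):

import ldap

def assert_memberof(conn, entry_dn, expected_group_dns):
    """Assert that entry_dn carries exactly the expected memberOf values."""
    res = conn.search_s(entry_dn, ldap.SCOPE_BASE, "(objectClass=*)", ["memberOf"])
    got = {v.decode("utf-8") for v in res[0][1].get("memberOf", [])}
    assert got == set(expected_group_dns), (got, set(expected_group_dns))

For instance, the final lookup in the scenario_4 log would correspond to:

assert_memberof(conn, "uid=user_memofuser1,ou=people,dc=example,dc=com",
                ["cn=group_memofegrp020_1,ou=groups,dc=example,dc=com",
                 "cn=group_memofegrp020_5,ou=groups,dc=example,dc=com"])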
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_5 | 0.94 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:67 Create user uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:83 Create group cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | |||
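The ":109 Lookup / :116 memberof / :119 --> membership verified" records above all come from the suite's memberOf verification step: read an entry's memberOf attribute, print each value, and require that the expected groups are present. A minimal sketch of that check (an illustration with python-ldap, not the suite's exact helper; the bind DN and password are assumptions about the test instance):

import ldap

ROOT_DN = 'cn=Directory Manager'  # assumption: default root DN of the test instance
ROOT_PW = 'password'              # assumption: test instance password

def verify_memberof(conn, user_dn, expected_group_dns):
    # Base-scope read of the entry's memberOf values (the ":109 Lookup" step).
    result = conn.search_s(user_dn, ldap.SCOPE_BASE, '(objectClass=*)', ['memberOf'])
    _dn, attrs = result[0]
    memberof = [v.decode() for v in attrs.get('memberOf', [])]
    for dn in memberof:
        print('memberof: %s' % dn)                   # mirrors the ":116 memberof:" records
    assert set(expected_group_dns) <= set(memberof)  # ":119 --> membership verified"

conn = ldap.initialize('ldap://localhost:389')
conn.simple_bind_s(ROOT_DN, ROOT_PW)
verify_memberof(conn,
                'uid=user_memofenh1,ou=people,dc=example,dc=com',
                ['cn=group_memofegrp1,ou=groups,dc=example,dc=com',
                 'cn=group_memofegrp3,ou=groups,dc=example,dc=com'])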
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_6 | 5.32 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com 
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check 
b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com 
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com | | |
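The "Lookup memberof from ..." / "memberof: ..." / "--> membership verified" records above come from the suite's reverse-membership helper (memberof_test.py:109-119): it reads an entry's memberOf attribute and asserts that every expected group DN is present, and the same DN is looked up again after each group modification, which is why the sequences repeat. A minimal sketch of that check, assuming a plain python-ldap connection rather than the suite's lib389 topology (the function and parameter names here are illustrative, not the suite's actual helper):

    import ldap
    import logging

    log = logging.getLogger(__name__)

    def verify_memberof(conn, member_dn, expected_group_dns):
        # Illustrative sketch: read the entry's memberOf values
        # (python-ldap returns attribute values as bytes).
        log.info('Lookup memberof from %r', member_dn.encode())
        dn, attrs = conn.search_s(member_dn, ldap.SCOPE_BASE,
                                  '(objectClass=*)', ['memberOf'])[0]
        found = [v.decode() for v in attrs.get('memberOf', [])]
        for group_dn in found:
            log.info('memberof: %s', group_dn)
        # Every expected group must appear; extra memberOf values are allowed.
        assert set(expected_group_dns) <= set(found)
        log.info('--> membership verified')

A connection for this sketch would be obtained in the usual python-ldap way, e.g. conn = ldap.initialize('ldap://localhost:389') followed by conn.simple_bind_s(root_dn, password); the real suite drives this through its lib389 standalone topology instead.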
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_7 | 0.13 | |
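Besides the reverse lookups, the captured log below also shows the forward check (memberof_test.py:94-101): for each "Check b'<group>'.<attr> = b'<dn>'" record the test reads the group's member or uniqueMember values, logs each one, and asserts that the expected DN is listed. A comparable sketch under the same python-ldap assumption (again with illustrative names, not the suite's helper):

    import ldap
    import logging

    log = logging.getLogger(__name__)

    def check_group_attr(conn, group_dn, attr, expected_dn):
        # Forward check: does group_dn list expected_dn in attr,
        # where attr is 'member' or 'uniqueMember'?
        log.info('Check %r.%s = %r', group_dn.encode(), attr,
                 expected_dn.encode())
        dn, attrs = conn.search_s(group_dn, ldap.SCOPE_BASE,
                                  '(objectClass=*)', [attr])[0]
        values = [v.decode() for v in attrs.get(attr, [])]
        for value in values:
            log.info('%s: %s', attr, value)
        assert expected_dn in values

Running both directions catches asymmetric breakage: the memberOf plugin can leave a stale memberOf value even when the group's member list is correct, and vice versa.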
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp016,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofenh2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified
INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com'
INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com
INFO tests.suites.plugins.memberof_test:memberof_test.py:116
memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
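The "Lookup memberof from <dn>" / "--> membership verified" records above come from the suite's memberof lookup helper (memberof_test.py:109-119): a base-scope read of the entry's memberOf values. A minimal sketch of such a lookup with python-ldap, not the suite's own code; the server URI and bind credentials below are assumptions for illustration only:

import ldap

def lookup_memberof(conn, dn):
    # Base-scope search returning only the memberOf values of one entry,
    # mirroring the "Lookup memberof from <dn>" records in the log above.
    result = conn.search_s(dn, ldap.SCOPE_BASE, '(objectClass=*)', ['memberOf'])
    _dn, attrs = result[0]
    return [v.decode() for v in attrs.get('memberOf', [])]

conn = ldap.initialize('ldap://localhost:389')          # assumed URI
conn.simple_bind_s('cn=Directory Manager', 'password')  # assumed credentials
for group_dn in lookup_memberof(conn, 'uid=user_memofuser1,ou=people,dc=example,dc=com'):
    print('memberof:', group_dn)

The test then compares the returned set of group DNs against the expected membership, which is why each lookup in the log ends with a "membership verified" record.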
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_8 | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO 
tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: 
uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO 
tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified | |||
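The repeated "Lookup memberof from ... --> membership verified" records above all perform one check: read the entry's memberOf values and compare them against the expected group DNs. A minimal sketch of that check with python-ldap follows; the connection URI, bind credentials, and the expected_groups argument are hypothetical placeholders, not values taken from the test suite.

import ldap

# Hypothetical connection parameters, for illustration only.
conn = ldap.initialize('ldap://localhost:389')
conn.simple_bind_s('cn=Directory Manager', 'password')

def verify_memberof(user_dn, expected_groups):
    """Read the entry's memberOf values and assert each expected group DN is present."""
    _, attrs = conn.search_s(user_dn, ldap.SCOPE_BASE,
                             '(objectClass=*)', ['memberOf'])[0]
    # python-ldap returns attribute values as bytes; compare case-insensitively,
    # since DNs in the logs above differ only in formatting, not identity.
    found = {dn.decode().lower() for dn in attrs.get('memberOf', [])}
    for group in expected_groups:
        print('memberof: %s' % group)
        assert group.lower() in found
    print('--> membership verified')

verify_memberof('uid=user_memofuser1,ou=people,dc=example,dc=com',
                ['cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'])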
Passed | suites/plugins/memberof_test.py::test_complex_group_scenario_9 | 0.17 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: 
uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser2,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser3,ou=people,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser3,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser4,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 uniqueMember: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 
memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO 
tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: uid=user_memofuser1,ou=people,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_1,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_2,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_3,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:101 member: cn=group_memofegrp020_4,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO 
tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'cn=group_memofegrp020_5,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.uniqueMember = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:94 Check b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com'.member = b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_1,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_2,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: 
cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_3,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'cn=group_memofegrp020_4,ou=groups,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:119 --> membership verified INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser1,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:116 memberof: cn=group_memofegrp020_5,ou=groups,dc=example,dc=com INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser2,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from 
b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser3,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' INFO tests.suites.plugins.memberof_test:memberof_test.py:109 Lookup memberof from b'uid=user_memofuser4,ou=people,dc=example,dc=com' | |||
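The complementary "Check <group>.member = <dn>" and "Check <group>.uniqueMember = <dn>" records verify that a given DN appears among a group's membership attribute values. A sketch of the presence case with python-ldap (connection details hypothetical; the real test also exercises the absence case, which is omitted here):

import ldap

conn = ldap.initialize('ldap://localhost:389')  # hypothetical URI
conn.simple_bind_s('cn=Directory Manager', 'password')  # hypothetical credentials

def check_membership(group_dn, attr, member_dn):
    """Assert member_dn is among the group's attr ('member' or 'uniqueMember') values."""
    print('Check %s.%s = %s' % (group_dn, attr, member_dn))
    _, attrs = conn.search_s(group_dn, ldap.SCOPE_BASE,
                             '(objectClass=*)', [attr])[0]
    values = [v.decode() for v in attrs.get(attr, [])]
    for v in values:
        print('%s: %s' % (attr, v))
    assert member_dn in values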
Passed | suites/plugins/pluginpath_validation_test.py::test_pluginpath_validation | 10.69 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.pluginpath_validation_test:pluginpath_validation_test.py:103 Test complete | |||
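test_pluginpath_validation exercises the server's validation of a plugin's nsslapd-pluginPath attribute. A hedged sketch of that idea: point a plugin entry at a nonexistent library and expect the modify to fail (the exact error code is not shown in this report, so a generic LDAPError is caught; URI and credentials are hypothetical, the port is taken from the setup log above):

import ldap

conn = ldap.initialize('ldap://localhost:38901')
conn.simple_bind_s('cn=Directory Manager', 'password')  # hypothetical credentials

plugin_dn = 'cn=MemberOf Plugin,cn=plugins,cn=config'  # any plugin entry would do
try:
    conn.modify_s(plugin_dn, [(ldap.MOD_REPLACE, 'nsslapd-pluginPath',
                               [b'/no/such/libplugin'])])
    raise AssertionError('server accepted an invalid plugin path')
except ldap.LDAPError as e:
    print('rejected as expected: %s' % e)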
Passed | suites/plugins/referint_test.py::test_referential_false_failure | 17.73 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo0,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo1,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo2,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo3,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo4,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo5,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo6,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo7,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo8,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo9,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo10,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo11,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo12,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo13,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo14,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo15,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo16,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo17,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo18,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo19,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo20,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo21,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo22,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo23,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo24,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo25,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo26,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo27,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo28,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo29,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo30,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo31,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo32,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo33,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo34,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo35,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo36,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo37,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo38,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo39,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo40,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo41,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo42,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo43,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo44,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo45,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo46,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo47,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo48,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo49,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo50,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo51,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo52,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo53,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo54,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo55,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo56,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo57,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo58,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo59,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo60,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo61,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo62,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo63,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo64,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo65,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo66,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo67,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo68,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo69,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo70,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo71,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo72,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo73,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo74,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo75,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo76,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo77,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo78,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo79,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo80,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo81,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo82,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo83,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo84,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo85,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo86,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo87,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo88,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo89,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo90,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo91,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo92,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo93,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo94,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo95,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo96,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo97,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo98,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo99,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo100,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo101,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo102,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo103,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo104,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo105,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo106,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo107,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo108,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo109,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo110,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo111,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo112,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo113,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo114,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo115,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo116,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo117,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo118,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo119,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo120,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo121,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo122,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo123,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo124,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo125,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo126,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo127,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo128,dc=example,dc=com):
CRITICAL tests.suites.plugins.referint_test:referint_test.py:37 Adding user (uid=foo\,oo129,dc=example,dc=com): | |||
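The referint test above adds 130 users whose RDN contains a comma that must be escaped (uid=foo\,ooN). Building such DNs safely is easiest with python-ldap's ldap.dn.escape_dn_chars; a small sketch mirroring the logged entries:

from ldap.dn import escape_dn_chars

# Build DNs like uid=foo\,oo0,dc=example,dc=com, as used by the entries above.
for i in range(130):
    rdn_value = 'foo,oo%d' % i            # raw value containing a comma
    dn = 'uid=%s,dc=example,dc=com' % escape_dn_chars(rdn_value)
    print(dn)                             # -> uid=foo\,oo0,dc=example,dc=com ...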
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_specific_time | 9.52 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:60 Initializing root DN test suite...
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:70 test_rootdn_init: Initialized root DN test suite.
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:102 Running test_rootdn_access_specific_time... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_day_of_week | 0.11 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:185 Running test_rootdn_access_day_of_week...
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:202 Today: Sat
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:203 Allowed days: Sat,Sun
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:204 Deny days: Mon, Tue | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_ip | 0.11 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:268 Running test_rootdn_access_denied_ip... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_host | 3.53 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:333 Running test_rootdn_access_denied_host... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_ip | 0.13 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:400 Running test_rootdn_access_allowed_ip... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_host | 3.35 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:466 Running test_rootdn_access_allowed_host... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_config_validate | 0.11 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:575 Add just "rootdn-open-time" | |||
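The 'Add just "rootdn-open-time"' step suggests a config-validation check: the plugin should refuse an open time that has no matching rootdn-close-time. A hedged sketch of that check (plugin DN as documented for the 389-ds RootDN Access Control plugin; the exact error code is not shown in this report, so a generic LDAPError is caught):

import ldap

conn = ldap.initialize('ldap://localhost:38901')
conn.simple_bind_s('cn=Directory Manager', 'password')  # hypothetical credentials

ROOTDN_PLUGIN = 'cn=RootDN Access Control,cn=plugins,cn=config'
try:
    conn.modify_s(ROOTDN_PLUGIN, [(ldap.MOD_ADD, 'rootdn-open-time', [b'0000'])])
    raise AssertionError('open-time without close-time should be rejected')
except ldap.LDAPError as e:
    print('rejected as expected: %s' % e)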
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_denied_ip_wildcard | 0.12 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area
-------------------------------Captured log call--------------------------------
INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:666 Running test_rootdn_access_denied_ip_wildcard... | |||
Passed | suites/plugins/rootdn_plugin_test.py::test_rootdn_access_allowed_ip_wildcard | 1.29 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:36 Cleaning up the config area -------------------------------Captured log call-------------------------------- INFO tests.suites.plugins.rootdn_plugin_test:rootdn_plugin_test.py:712 Running test_rootdn_access_allowed_ip... | |||
Passed | suites/psearch/psearch_test.py::test_psearch | 13.80 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:psearch_test.py:31 dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 ou=groups,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 ou=people,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 ou=services,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 uid=demo_user,ou=people,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=demo_group,ou=groups,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=group_admin,ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=group_modify,ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=user_admin,ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=user_modify,ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=user_passwd_reset,ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:31 cn=user_private_read,ou=permissions,dc=example,dc=com has changed! INFO lib389:psearch_test.py:35 No more results INFO lib389:psearch_test.py:31 cn=group1,ou=groups,dc=example,dc=com has changed! INFO lib389:psearch_test.py:35 No more results | |||
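The "has changed!" lines above are produced by a persistent search. A sketch of issuing one with python-ldap's psearch control (port and credentials are illustrative, and the control's defaults are assumed to request all change types):

    import ldap
    from ldap.controls.psearch import PersistentSearchControl

    conn = ldap.initialize('ldap://localhost:38901')
    conn.simple_bind_s('cn=Directory Manager', 'password')
    # Persistent search: the server keeps the operation open and streams changes
    msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(objectClass=*)', serverctrls=[PersistentSearchControl()])
    rtype, rdata, rmsgid, ctrls = conn.result3(msgid, all=0, timeout=5)
    for dn, attrs in rdata:
        print(f'{dn} has changed!')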
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[CRYPT] | 9.40 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA] | 0.37 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA] | 0.11 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA256] | 0.13 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA256] | 0.10 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA384] | 0.14 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA384] | 0.14 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SHA512] | 0.10 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[SSHA512] | 0.11 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[MD5] | 0.11 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_password_scheme[PBKDF2_SHA256] | 0.14 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_clear_scheme | 0.39 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_two_scheme | 4.49 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_pbkdf2_sha256 | 4.80 | |
No log output captured. | |||
Passed | suites/pwp_storage/storage_test.py::test_check_ssha512 | 8.33 | |
No log output captured. | |||
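Each parametrized case above sets a password storage scheme and verifies the stored hash. A condensed sketch of that pattern, assuming lib389's UserAccounts API (password and suffix are illustrative):

    from lib389.idm.user import UserAccounts

    standalone.config.replace('passwordStorageScheme', 'SSHA512')
    users = UserAccounts(standalone, 'dc=example,dc=com')
    user = users.create_test_user()
    user.set('userPassword', 'Secret123')
    # As Directory Manager we can read the stored value back; it should
    # carry the scheme prefix, e.g. {SSHA512}...
    assert user.get_attr_val_utf8('userPassword').startswith('{SSHA512}')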
Passed | suites/referint_plugin/rename_test.py::test_rename_large_subtree | 185.75 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f6f26495-a239-48db-a952-9d1cd740f16d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f99f4e93-2f8f-428a-8a1a-8a24b6f9bee5 / got description=f6f26495-a239-48db-a952-9d1cd740f16d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 919f862e-a6b5-4d90-a250-4265850c8a15 / got description=f99f4e93-2f8f-428a-8a1a-8a24b6f9bee5) [identical Retry line repeated 16 times] INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5036a451-b4bd-407f-bf77-89bf3d8b71cb / got description=919f862e-a6b5-4d90-a250-4265850c8a15) [identical Retry line repeated 14 times] INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
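The Retry/SUCCESS pairs above are lib389's replication checker polling until a marker entry arrives on the peer. A sketch of the call that produces them, assuming the ReplicationManager API (instance names are illustrative):

    from lib389._constants import DEFAULT_SUFFIX
    from lib389.replica import ReplicationManager

    repl = ReplicationManager(DEFAULT_SUFFIX)
    # Writes a test value on supplier1, then polls supplier2 until it shows
    # up, logging one 'Retry: ... is NOT working' line per failed poll.
    repl.wait_for_replication(supplier1, supplier2)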
Passed | suites/replication/acceptance_test.py::test_add_entry | 72.98 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier4 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'supplier4', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1efa7348-20ce-4ca7-89fd-0ea02012c430 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a2659554-7004-4b71-bc78-2f0a3da82030 / got description=1efa7348-20ce-4ca7-89fd-0ea02012c430) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect e6fe9562-ce45-452c-9c68-97e7e7b6d4db / got description=a2659554-7004-4b71-bc78-2f0a3da82030) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 314a4b4f-749f-41ad-8b87-de9089e1b899 / got description=e6fe9562-ce45-452c-9c68-97e7e7b6d4db) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:156 Joining supplier supplier4 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect bef4355f-79f5-41f5-b048-72102102421c / got description=314a4b4f-749f-41ad-8b87-de9089e1b899) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 66be8aa8-b1b3-4ce6-9db8-63ae064e7070 / got description=bef4355f-79f5-41f5-b048-72102102421c) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 66be8aa8-b1b3-4ce6-9db8-63ae064e7070 / got description=bef4355f-79f5-41f5-b048-72102102421c) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 66be8aa8-b1b3-4ce6-9db8-63ae064e7070 / got description=bef4355f-79f5-41f5-b048-72102102421c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier4 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com
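The setup above is the standard 4-supplier mesh fixture. A sketch of building the same topology directly, using the create_topology helper seen throughout this report (the .ms access pattern is an assumption based on the fixture naming):

    from lib389._constants import ReplicaRole
    from lib389.topologies import create_topology

    topo = create_topology({ReplicaRole.SUPPLIER: 4})
    supplier1 = topo.ms['supplier1']   # assumed: suppliers are exposed via the .ms dict
    supplier2 = topo.ms['supplier2']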
Passed | suites/replication/acceptance_test.py::test_modify_entry | 3.43 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:128 Modifying entry uid=mmrepl_test,dc=example,dc=com - add operation INFO tests.suites.replication.acceptance_test:acceptance_test.py:138 Modifying entry uid=mmrepl_test,dc=example,dc=com - replace operation INFO tests.suites.replication.acceptance_test:acceptance_test.py:146 Modifying entry uid=mmrepl_test,dc=example,dc=com - delete operation | |||
Passed | suites/replication/acceptance_test.py::test_delete_entry | 10.16 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:168 Deleting entry uid=mmrepl_test,dc=example,dc=com during the test | |||
Passed | suites/replication/acceptance_test.py::test_modrdn_entry[0] | 20.20 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:192 Modify entry RDN uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:211 Remove entry with new RDN uid=newrdn,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_modrdn_entry[1] | 20.20 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:192 Modify entry RDN uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:211 Remove entry with new RDN uid=newrdn,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_modrdn_after_pause | 14.57 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:237 Adding entry uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:252 Pause all replicas INFO tests.suites.replication.acceptance_test:acceptance_test.py:255 Modify entry RDN uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:263 Resume all replicas INFO tests.suites.replication.acceptance_test:acceptance_test.py:266 Wait for replication to happen INFO tests.suites.replication.acceptance_test:acceptance_test.py:273 Remove entry with new RDN uid=newrdn,dc=example,dc=com | |||
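A sketch of the pause/modrdn/resume flow logged above, assuming the lib389 topology helpers and DSLdapObject.rename() (names are illustrative):

    topo.pause_all_replicas()          # stop all agreements
    user.rename('uid=newrdn')          # modrdn while no replication traffic flows
    topo.resume_all_replicas()         # restart agreements
    repl.wait_for_replication(supplier1, supplier2)  # the rename must still converge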
Passed | suites/replication/acceptance_test.py::test_modify_stripattrs | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:295 Modify nsds5replicastripattrs with b'modifiersname modifytimestamp' INFO tests.suites.replication.acceptance_test:acceptance_test.py:298 Check nsds5replicastripattrs for b'modifiersname modifytimestamp' | |||
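A sketch of the stripattrs update this test performs, assuming lib389's Agreements API (the attribute and value come from the log above):

    from lib389.agreement import Agreements

    agmt = Agreements(supplier1).list()[0]
    agmt.replace('nsds5replicastripattrs', 'modifiersname modifytimestamp')
    assert agmt.get_attr_val_bytes('nsds5replicastripattrs') == b'modifiersname modifytimestamp'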
Passed | suites/replication/acceptance_test.py::test_new_suffix | 12.42 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to supplier1 INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-supplier1/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to supplier2 INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-supplier2/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to supplier3 INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-supplier3/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: 
nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO tests.suites.replication.acceptance_test:acceptance_test.py:62 Adding suffix:o=test_repl and backend: repl_base to supplier4 INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=repl_base,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=repl_base,cn=ldbm database,cn=plugins,cn=config cn: repl_base nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-supplier4/db/repl_base nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=test_repl objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=test_repl",cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dtest_repl,cn=mapping tree,cn=config cn: o=test_repl nsslapd-backend: repl_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 59cc3140-4a00-48f5-9e0c-cf6cdeee351c / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b4151ad9-a9e6-475e-8eb2-b0f5158089ba / got description=59cc3140-4a00-48f5-9e0c-cf6cdeee351c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 447c92dc-9e96-4319-81d0-703a56736d71 / got 
description=b4151ad9-a9e6-475e-8eb2-b0f5158089ba) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3c0684e1-8843-4562-8480-3b066881e4b0 / got description=447c92dc-9e96-4319-81d0-703a56736d71) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working -----------------------------Captured log teardown------------------------------ INFO tests.suites.replication.acceptance_test:acceptance_test.py:79 Deleting suffix:o=test_repl and backend: repl_base from supplier1 WARNING lib389:mappingTree.py:234 Warning: cn=o\3Dtest_repl,cn=mapping tree,cn=config (cn=o\3Dtest_repl,cn=mapping tree,cn=config) INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO tests.suites.replication.acceptance_test:acceptance_test.py:79 Deleting suffix:o=test_repl and backend: repl_base from supplier2 WARNING lib389:mappingTree.py:234 Warning: cn=o\3Dtest_repl,cn=mapping tree,cn=config (cn=o\3Dtest_repl,cn=mapping tree,cn=config) INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO tests.suites.replication.acceptance_test:acceptance_test.py:79 Deleting suffix:o=test_repl and backend: repl_base from supplier3 WARNING lib389:mappingTree.py:234 Warning: cn=o\3Dtest_repl,cn=mapping tree,cn=config (cn=o\3Dtest_repl,cn=mapping tree,cn=config) INFO lib389:backend.py:80 List backend with suffix=o=test_repl INFO tests.suites.replication.acceptance_test:acceptance_test.py:79 Deleting suffix:o=test_repl and backend: repl_base from supplier4 WARNING lib389:mappingTree.py:234 Warning: cn=o\3Dtest_repl,cn=mapping tree,cn=config (cn=o\3Dtest_repl,cn=mapping tree,cn=config) INFO lib389:backend.py:80 List backend with suffix=o=test_repl | |||
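The per-supplier backend creation logged above follows the usual lib389 pattern; a minimal sketch:

    from lib389.backend import Backends

    # Create the o=test_repl suffix on one supplier; the test repeats this
    # on all four suppliers before enabling replication for the new suffix.
    Backends(supplier1).create(properties={'cn': 'repl_base',
                                           'nsslapd-suffix': 'o=test_repl'})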
Passed | suites/replication/acceptance_test.py::test_many_attrs | 20.25 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:353 Modifying entry uid=mmrepl_test,dc=example,dc=com - 10 add operations INFO tests.suites.replication.acceptance_test:acceptance_test.py:357 Check that everything was properly replicated after an add operation INFO tests.suites.replication.acceptance_test:acceptance_test.py:362 Modifying entry uid=mmrepl_test,dc=example,dc=com - 4 delete operations for [b'test0', b'test4', b'test7', b'test9'] INFO tests.suites.replication.acceptance_test:acceptance_test.py:366 Check that everything was properly replicated after a delete operation | |||
Passed | suites/replication/acceptance_test.py::test_double_delete | 10.18 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com INFO tests.suites.replication.acceptance_test:acceptance_test.py:45 Deleting entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:386 Deleting entry uid=mmrepl_test,dc=example,dc=com from supplier1 INFO tests.suites.replication.acceptance_test:acceptance_test.py:389 Deleting entry uid=mmrepl_test,dc=example,dc=com from supplier2 INFO tests.suites.replication.acceptance_test:acceptance_test.py:393 Entry uid=mmrepl_test,dc=example,dc=com wasn't found on supplier2. This is expected. INFO tests.suites.replication.acceptance_test:acceptance_test.py:395 Make searches to check if the server is alive
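A sketch of the double-delete scenario above: once the delete from supplier1 replicates, deleting the same entry on supplier2 should fail cleanly (APIs as in lib389; names are illustrative):

    import ldap
    from lib389.idm.user import UserAccounts

    UserAccounts(supplier1, 'dc=example,dc=com').get('mmrepl_test').delete()
    try:
        UserAccounts(supplier2, 'dc=example,dc=com').get('mmrepl_test').delete()
    except ldap.NO_SUCH_OBJECT:
        pass  # expected: the entry is already gone on supplier2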
Passed | suites/replication/acceptance_test.py::test_password_repl_error | 22.53 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:41 Adding entry uid=mmrepl_test,dc=example,dc=com -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:416 Clean the error log INFO tests.suites.replication.acceptance_test:acceptance_test.py:419 Set replication loglevel INFO tests.suites.replication.acceptance_test:acceptance_test.py:422 Modifying entry uid=mmrepl_test,dc=example,dc=com - change userpassword on supplier 2 INFO tests.suites.replication.acceptance_test:acceptance_test.py:430 Restart the servers to flush the logs INFO tests.suites.replication.acceptance_test:acceptance_test.py:439 Check the error log for the error with uid=mmrepl_test,dc=example,dc=com | |||
Passed | suites/replication/acceptance_test.py::test_invalid_agmt | 0.13 | |
-------------------------------Captured log call-------------------------------- CRITICAL lib389:agreement.py:1025 Failed to add replication agreement: {'msgtype': 105, 'msgid': 5, 'result': 53, 'desc': 'Server is unwilling to perform', 'ctrls': []} INFO lib389:acceptance_test.py:464 Invalid repl agreement correctly rejected | |||
Passed | suites/replication/acceptance_test.py::test_warining_for_invalid_replica | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:494 Set nsds5ReplicaBackoffMin to 20 INFO tests.suites.replication.acceptance_test:acceptance_test.py:497 Set nsds5ReplicaBackoffMax to 10 INFO tests.suites.replication.acceptance_test:acceptance_test.py:499 Resetting configuration: nsds5ReplicaBackoffMin INFO tests.suites.replication.acceptance_test:acceptance_test.py:501 Check the error log for the error | |||
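A sketch of the misconfiguration this test checks: setting nsds5ReplicaBackoffMax below nsds5ReplicaBackoffMin should be flagged in the error log (assuming lib389's Replicas API):

    from lib389._constants import DEFAULT_SUFFIX
    from lib389.replica import Replicas

    replica = Replicas(supplier1).get(DEFAULT_SUFFIX)
    replica.set('nsds5ReplicaBackoffMin', '20')
    replica.set('nsds5ReplicaBackoffMax', '10')  # max < min: the server logs a warning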
Passed | suites/replication/acceptance_test.py::test_csnpurge_large_valueset | 37.61 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 40af1088-92cf-47b5-a363-48f6b1fa67a0 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3ae7a799-650b-4fcc-bf99-a59eaa941188 / got description=40af1088-92cf-47b5-a363-48f6b1fa67a0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:552 nsds5ReplicaPurgeDelay to 5
Passed | suites/replication/acceptance_test.py::test_urp_trigger_substring_search | 13.72 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:583 Set nsslapd-plugin-logging to on INFO tests.suites.replication.acceptance_test:acceptance_test.py:613 Entry not yet replicated on M2, wait a bit INFO tests.suites.replication.acceptance_test:acceptance_test.py:613 Entry not yet replicated on M2, wait a bit INFO tests.suites.replication.acceptance_test:acceptance_test.py:617 Check that on M2, URP has not triggered such an internal search INFO tests.suites.replication.acceptance_test:acceptance_test.py:620 found line: []
Passed | suites/replication/acceptance_test.py::test_csngen_task | 35.78 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.acceptance_test:acceptance_test.py:643 Check that the error log contains strings showing the CSN generator was tested
Passed | suites/replication/cascading_test.py::test_basic_with_hub | 60.56 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for hub1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:524 Creating replication topology. INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 9f8ceab2-22b5-49c0-bcc6-bb97e7ea66dd / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is working INFO lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect bc1fd81e-c5b9-4a5f-a03f-8cddbee78593 / got description=9f8ceab2-22b5-49c0-bcc6-bb97e7ea66dd) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cascading_test:cascading_test.py:45 update cn=101,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal INFO tests.suites.replication.cascading_test:cascading_test.py:45 update cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config to add nsDS5ReplicatedAttributeListTotal
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 94d997c8-4244-42e4-8191-eaeb9db5961d / got description=bc1fd81e-c5b9-4a5f-a03f-8cddbee78593) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 00a16333-326b-44f8-9034-7f49e71f8b35 / got description=94d997c8-4244-42e4-8191-eaeb9db5961d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 7b42a3f5-df3d-475a-b767-518dcc3ede92 / got description=00a16333-326b-44f8-9034-7f49e71f8b35) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working | |||
Passed | suites/replication/changelog_encryption_test.py::test_cl_encryption_setup_process | 55.14 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c0c008c5-e02f-4919-84f7-0b8932a7141f / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:45 Enable TLS ... INFO tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:50 Export changelog ... INFO tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:57 Enable changelog encryption ... INFO tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:70 Import changelog ... INFO tests.suites.replication.changelog_encryption_test:changelog_encryption_test.py:75 Test replication is still working ...
Passed | suites/replication/changelog_test.py::test_dsconf_dump_changelog_files_removed | 31.49 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 48dc3e6d-8e95-4407-8fb0-cec4eb19f229 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0ab680da-5156-401b-8dc4-7f3cc019323e / got description=48dc3e6d-8e95-4407-8fb0-cec4eb19f229) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:223 Remove .ldif files, if present in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:223 Remove .done files, if present in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:53 Adding user replusr INFO tests.suites.replication.changelog_test:changelog_test.py:66 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:73 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:387 Use dsconf export-changelog with invalid parameters INFO tests.suites.replication.changelog_test:changelog_test.py:389 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'badpasswd', 'replication', 'export-changelog'] INFO tests.suites.replication.changelog_test:changelog_test.py:392 output message : b'No action provided, here is some --help.\nusage: dsconf [-h] [-v] [-D BINDDN] [-w BINDPW] [-W] [-y PWDFILE] [-b BASEDN]\n [-Z] [-j]\n instance\n {backend,backup,chaining,config,directory_manager,monitor,plugin,pwpolicy,localpwp,replication,repl-agmt,repl-winsync-agmt,repl-tasks,sasl,security,schema,repl-conflict}\n ...\n\npositional arguments:\n instance The instance name OR the LDAP url to connect to, IE\n localhost, ldap://mai.example.com:389\n {backend,backup,chaining,config,directory_manager,monitor,plugin,pwpolicy,localpwp,replication,repl-agmt,repl-winsync-agmt,repl-tasks,sasl,security,schema,repl-conflict}\n resources to act upon\n backend Manage database suffixes and backends\n backup Manage online backups\n chaining Manage database chaining/database links\n config Manage server configuration\n directory_manager Manage the directory manager account\n monitor Monitor the state of the instance\n plugin Manage plugins available on the server\n pwpolicy Get and set the global password policy settings\n localpwp Manage local (user/subtree) password policies\n replication Configure replication for a suffix\n repl-agmt Manage replication agreements\n repl-winsync-agmt Manage Winsync Agreements\n repl-tasks Manage replication tasks\n sasl Query and manipulate SASL mappings\n security Query and manipulate security options\n schema Query and manipulate schema\n repl-conflict Manage replication conflicts\n\noptional arguments:\n -h, --help show this help message and exit\n -v, --verbose Display verbose operation tracing during command\n execution\n -D BINDDN, --binddn BINDDN\n The account to bind as for executing operations\n -w BINDPW, --bindpw BINDPW\n Password for binddn\n -W, --prompt Prompt for password for the bind DN\n -y PWDFILE, --pwdfile PWDFILE\n Specifies a file containing the password for the\n binddn\n -b BASEDN, --basedn BASEDN\n Basedn (root naming context) of the instance to manage\n -Z, --starttls Connect with StartTLS\n -j, --json Return result in JSON object\n' INFO tests.suites.replication.changelog_test:changelog_test.py:397 Use dsconf replication changelog without -l option: no generated ldif files should be present in /var/lib/dirsrv/slapd-supplier1/ldif INFO 
tests.suites.replication.changelog_test:changelog_test.py:400 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'password', 'replication', 'export-changelog', 'default', '-r', 'dc=example,dc=com'] INFO tests.suites.replication.changelog_test:changelog_test.py:405 Wait for all dsconf export-changelog files to be generated INFO tests.suites.replication.changelog_test:changelog_test.py:408 Check if dsconf export-changelog generated .ldif.done files are present - should not INFO tests.suites.replication.changelog_test:changelog_test.py:414 All dsconf export-changelog generated .ldif files have been successfully removed from /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:417 Use dsconf replication changelog with -l option: generated ldif files should be kept in /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:420 Command used : ['/usr/sbin/dsconf', 'ldap://LOCALHOST:39001', '-D', 'cn=Directory Manager', '-w', 'password', 'replication', 'export-changelog', 'to-ldif', '-o', '/var/lib/dirsrv/slapd-supplier1/ldif/test.ldif', '-r', 'dc=example,dc=com', '-l'] INFO tests.suites.replication.changelog_test:changelog_test.py:425 Wait for all dsconf export-changelog files to be generated INFO tests.suites.replication.changelog_test:changelog_test.py:428 Check if dsconf export-changelog generated .ldif.done files are present - should be INFO tests.suites.replication.changelog_test:changelog_test.py:432 Success : ldif file /var/lib/dirsrv/slapd-supplier1/ldif/1f02e28e-c5b411eb-b692a547-51d9bf5e_cl.ldif.done is present | |||
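For reference, the two export-changelog invocations exercised above follow this pattern (the LOCALHOST URL, port 39001, and the Directory Manager password are specific to this test run):

    # dump the changelog for a suffix; the generated *_cl.ldif files are removed afterwards
    /usr/sbin/dsconf ldap://LOCALHOST:39001 -D "cn=Directory Manager" -w password \
        replication export-changelog default -r dc=example,dc=com

    # write to an explicit LDIF file and keep the intermediate *_cl.ldif.done files (-l)
    /usr/sbin/dsconf ldap://LOCALHOST:39001 -D "cn=Directory Manager" -w password \
        replication export-changelog to-ldif -o /var/lib/dirsrv/slapd-supplier1/ldif/test.ldif \
        -r dc=example,dc=com -l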
Passed | suites/replication/changelog_test.py::test_verify_changelog | 0.19 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:455 LDAP operations add, modify, modrdn and delete INFO tests.suites.replication.changelog_test:changelog_test.py:53 Adding user replusr INFO tests.suites.replication.changelog_test:changelog_test.py:66 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:73 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:80 Dump changelog using nsds5task and check if ldap operations are logged INFO tests.suites.replication.changelog_test:changelog_test.py:89 Remove ldif files, if present in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:100 No existing changelog ldif files present INFO tests.suites.replication.changelog_test:changelog_test.py:102 Running nsds5task to dump changelog database to a file INFO tests.suites.replication.changelog_test:changelog_test.py:105 Check if changelog ldif file exists in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:109 Changelog ldif file exists: /var/lib/dirsrv/slapd-supplier1/ldif/1f02e28e-c5b411eb-b692a547-51d9bf5e_cl.ldif INFO tests.suites.replication.changelog_test:changelog_test.py:119 Checking changelog ldif file for ldap operations INFO tests.suites.replication.changelog_test:changelog_test.py:124 Checking if all required changetype operations are present INFO tests.suites.replication.changelog_test:changelog_test.py:130 Valid ldap operations: {'add', 'delete', 'modrdn', 'modify'} INFO tests.suites.replication.changelog_test:changelog_test.py:131 Ldap operations found: {'add', 'modrdn', 'modify', 'delete'} | |||
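The nsds5task dump this test performs can be reproduced by writing the task attribute directly to the suffix's replica entry; a minimal sketch, assuming the standard replica DN for dc=example,dc=com and this run's test credentials (the CL2LDIF value asks the server to export its changelog to a <dbid>_cl.ldif file, which here lands under /var/lib/dirsrv/slapd-supplier1/ldif):

    ldapmodify -x -H ldap://localhost:39001 -D "cn=Directory Manager" -w password <<EOF
    dn: cn=replica,cn="dc=example,dc=com",cn=mapping tree,cn=config
    changetype: modify
    replace: nsds5task
    nsds5task: CL2LDIF
    EOF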
Passed | suites/replication/changelog_test.py::test_verify_changelog_online_backup | 6.22 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:482 Run db2bak script to take database backup INFO lib389:tasks.py:652 Backup task backup_06052021_001200 completed successfully INFO tests.suites.replication.changelog_test:changelog_test.py:494 Database backup is created successfully INFO tests.suites.replication.changelog_test:changelog_test.py:499 Run bak2db to restore directory server INFO lib389:tasks.py:709 Restore task restore_06052021_001203 completed successfully INFO tests.suites.replication.changelog_test:changelog_test.py:506 LDAP operations add, modify, modrdn and delete INFO tests.suites.replication.changelog_test:changelog_test.py:53 Adding user replusr INFO tests.suites.replication.changelog_test:changelog_test.py:66 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:73 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:80 Dump changelog using nsds5task and check if ldap operations are logged INFO tests.suites.replication.changelog_test:changelog_test.py:89 Remove ldif files, if present in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:98 Existing changelog ldif file: /var/lib/dirsrv/slapd-supplier1/ldif/1f02e28e-c5b411eb-b692a547-51d9bf5e_cl.ldif removed INFO tests.suites.replication.changelog_test:changelog_test.py:100 No existing changelog ldif files present INFO tests.suites.replication.changelog_test:changelog_test.py:102 Running nsds5task to dump changelog database to a file INFO tests.suites.replication.changelog_test:changelog_test.py:105 Check if changelog ldif file exists in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:109 Changelog ldif file exists: /var/lib/dirsrv/slapd-supplier1/ldif/1f02e28e-c5b411eb-b692a547-51d9bf5e_cl.ldif INFO tests.suites.replication.changelog_test:changelog_test.py:119 Checking changelog ldif file for ldap operations INFO tests.suites.replication.changelog_test:changelog_test.py:124 Checking if all required changetype operations are present INFO tests.suites.replication.changelog_test:changelog_test.py:130 Valid ldap operations: {'add', 'delete', 'modrdn', 'modify'} INFO tests.suites.replication.changelog_test:changelog_test.py:131 Ldap operations found: {'add', 'modrdn', 'modify', 'delete'} | |||
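The online backup here is driven through the cn=backup task interface (lib389's tasks.py). A hand-written task entry would look roughly like the following sketch, where the task cn and the nsArchiveDir path are illustrative rather than taken from this run:

    ldapadd -x -H ldap://localhost:39001 -D "cn=Directory Manager" -w password <<EOF
    dn: cn=backup_example,cn=backup,cn=tasks,cn=config
    objectClass: extensibleObject
    cn: backup_example
    nsArchiveDir: /var/lib/dirsrv/slapd-supplier1/bak/backup_example
    nsDatabaseType: ldbm database
    EOF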
Passed | suites/replication/changelog_test.py::test_verify_changelog_offline_backup | 5.87 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:535 Run db2bak to take database backup INFO tests.suites.replication.changelog_test:changelog_test.py:542 Run bak2db to restore directory server INFO tests.suites.replication.changelog_test:changelog_test.py:555 Database backup is created successfully INFO tests.suites.replication.changelog_test:changelog_test.py:560 LDAP operations add, modify, modrdn and delete INFO tests.suites.replication.changelog_test:changelog_test.py:53 Adding user replusr INFO tests.suites.replication.changelog_test:changelog_test.py:66 Modify RDN of user uid=replusr,ou=People,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:73 Deleting user: uid=cl5usr,ou=people,dc=example,dc=com INFO tests.suites.replication.changelog_test:changelog_test.py:80 Dump changelog using nsds5task and check if ldap operations are logged INFO tests.suites.replication.changelog_test:changelog_test.py:89 Remove ldif files, if present in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:98 Existing changelog ldif file: /var/lib/dirsrv/slapd-supplier1/ldif/1f02e28e-c5b411eb-b692a547-51d9bf5e_cl.ldif removed INFO tests.suites.replication.changelog_test:changelog_test.py:100 No existing changelog ldif files present INFO tests.suites.replication.changelog_test:changelog_test.py:102 Running nsds5task to dump changelog database to a file INFO tests.suites.replication.changelog_test:changelog_test.py:105 Check if changelog ldif file exists in: /var/lib/dirsrv/slapd-supplier1/ldif INFO tests.suites.replication.changelog_test:changelog_test.py:109 Changelog ldif file exists: /var/lib/dirsrv/slapd-supplier1/ldif/1f02e28e-c5b411eb-b692a547-51d9bf5e_cl.ldif INFO tests.suites.replication.changelog_test:changelog_test.py:119 Checking changelog ldif file for ldap operations INFO tests.suites.replication.changelog_test:changelog_test.py:124 Checking if all required changetype operations are present INFO tests.suites.replication.changelog_test:changelog_test.py:130 Valid ldap operations: {'add', 'delete', 'modrdn', 'modify'} INFO tests.suites.replication.changelog_test:changelog_test.py:131 Ldap operations found: {'add', 'modrdn', 'modify', 'delete'} | |||
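Outside the test harness, the same offline backup/restore cycle can be run with dsctl while the instance is stopped; a sketch, assuming the dsctl db2bak/bak2db subcommands of 389-ds-base 2.x and an illustrative archive path:

    dsctl supplier1 stop
    dsctl supplier1 db2bak /var/lib/dirsrv/slapd-supplier1/bak/offline_backup
    dsctl supplier1 bak2db /var/lib/dirsrv/slapd-supplier1/bak/offline_backup
    dsctl supplier1 start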
Passed | suites/replication/changelog_test.py::test_changelog_maxage | 5.12 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:145 Testing Ticket 47669 - Test duration syntax in the changelogs INFO lib389:changelog_test.py:148 Bind as cn=Directory Manager -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:582 1. Test nsslapd-changelogmaxage in cn=changelog5,cn=config INFO lib389:changelog_test.py:585 Bind as cn=Directory Manager INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 12345 -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 10s -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 30M -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 12h -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 2D -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 4w -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:182 Test nsslapd-changelogmaxage: -123 -- invalid ERROR tests.suites.replication.changelog_test:changelog_test.py:187 Expectedly failed to add nsslapd-changelogmaxage: -123 to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform INFO tests.suites.replication.changelog_test:changelog_test.py:182 Test nsslapd-changelogmaxage: xyz -- invalid ERROR tests.suites.replication.changelog_test:changelog_test.py:187 Expectedly failed to add nsslapd-changelogmaxage: xyz to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform | |||
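As the log above shows, nsslapd-changelogmaxage accepts a bare number of seconds or a number with an s/m/h/d/w unit suffix (the checks cover both cases, e.g. 30M, 2D, 4w), while negative or non-numeric values are rejected with "Server is unwilling to perform". For example, to expire changelog records older than four weeks (DN taken from the log; credentials are this run's test credentials):

    ldapmodify -x -H ldap://localhost:39001 -D "cn=Directory Manager" -w password <<EOF
    dn: cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config
    changetype: modify
    replace: nsslapd-changelogmaxage
    nsslapd-changelogmaxage: 4w
    EOF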
Passed | suites/replication/changelog_test.py::test_ticket47669_changelog_triminterval | 0.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:615 2. Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config INFO lib389:changelog_test.py:618 Bind as cn=Directory Manager INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogtrim-interval: 12345 -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogtrim-interval: 10s -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogtrim-interval: 30M -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogtrim-interval: 12h -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogtrim-interval: 2D -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogtrim-interval: 4w -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:182 Test nsslapd-changelogtrim-interval: -123 -- invalid ERROR tests.suites.replication.changelog_test:changelog_test.py:187 Expectedly failed to add nsslapd-changelogtrim-interval: -123 to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform INFO tests.suites.replication.changelog_test:changelog_test.py:182 Test nsslapd-changelogtrim-interval: xyz -- invalid ERROR tests.suites.replication.changelog_test:changelog_test.py:187 Expectedly failed to add nsslapd-changelogtrim-interval: xyz to cn=changelog,cn=userRoot,cn=ldbm database,cn=plugins,cn=config: error Server is unwilling to perform | |||
Passed | suites/replication/changelog_test.py::test_retrochangelog_maxage | 0.43 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:684 4. Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config INFO lib389:changelog_test.py:687 Bind as cn=Directory Manager INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 12345 -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 10s -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 30M -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 12h -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 2D -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:175 Test nsslapd-changelogmaxage: 4w -- valid INFO tests.suites.replication.changelog_test:changelog_test.py:182 Test nsslapd-changelogmaxage: -123 -- invalid INFO tests.suites.replication.changelog_test:changelog_test.py:182 Test nsslapd-changelogmaxage: xyz -- invalid INFO lib389:changelog_test.py:699 ticket47669 was successfully verified. | |||
Passed | suites/replication/changelog_test.py::test_retrochangelog_trimming_crash | 25.30 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_test:changelog_test.py:724 1. Test retroCL trimming crash in cn=Retro Changelog Plugin,cn=plugins,cn=config INFO lib389:changelog_test.py:728 ticket50736 start verification INFO lib389:changelog_test.py:744 ticket 50736 was successfully verified. | |||
Passed | suites/replication/changelog_trimming_test.py::test_max_age | 19.89 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_trimming_test:changelog_trimming_test.py:90 Testing changelog trimming interval with max age... | |||
Passed | suites/replication/changelog_trimming_test.py::test_max_entries | 13.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.changelog_trimming_test:changelog_trimming_test.py:137 Testing changelog trimming interval with max entries... | |||
Passed | suites/replication/cleanallruv_max_tasks_test.py::test_max_tasks | 104.17 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier4 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'supplier4', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ac24bc39-0f93-491b-8279-8761e7b56045 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 32976344-27ab-4f5f-a9b0-f3980d0fbea6 / got description=ac24bc39-0f93-491b-8279-8761e7b56045) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 4cfc0a7c-3621-4857-ab39-e73200b29e33 / got description=32976344-27ab-4f5f-a9b0-f3980d0fbea6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 65b3b576-117b-4eb2-ad05-de4a68fe9dcc / got description=4cfc0a7c-3621-4857-ab39-e73200b29e33) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:156 Joining supplier supplier4 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e60616b2-f8eb-4145-ad99-4a453f248f34 / got description=65b3b576-117b-4eb2-ad05-de4a68fe9dcc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6495ee7c-88da-48cd-94b2-2e824ff5d212 / got description=e60616b2-f8eb-4145-ad99-4a453f248f34) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier4 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created | |||
Passed | suites/replication/cleanallruv_test.py::test_clean | 135.95 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier4 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'supplier4', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 143b0c3e-c31b-4ac9-be37-f7f086263bd7 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ea4cce01-0085-43b8-9fcf-4493f49032fb / got description=143b0c3e-c31b-4ac9-be37-f7f086263bd7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 8fdb5bed-adde-4d25-8679-8f51ca8098c7 / got description=ea4cce01-0085-43b8-9fcf-4493f49032fb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b1943ba8-e5c9-4878-8d97-e0e0f45013b9 / got description=8fdb5bed-adde-4d25-8679-8f51ca8098c7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:156 Joining supplier supplier4 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5a640942-2e3b-4a9a-acfb-b0acdb4d6d11 / got description=b1943ba8-e5c9-4878-8d97-e0e0f45013b9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a3bece95-3d7c-4de6-81b9-e612a2ee5899 / got description=5a640942-2e3b-4a9a-acfb-b0acdb4d6d11) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier4 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bcbbee0a-59d6-4bb0-bd16-45bc8ae66258 / got description=a3bece95-3d7c-4de6-81b9-e612a2ee5899) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect f3538aac-9a09-4277-a1a6-e891a02801ee / got description=bcbbee0a-59d6-4bb0-bd16-45bc8ae66258) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 1e14100a-0781-4dcd-9f84-ac53f532aad3 / got description=f3538aac-9a09-4277-a1a6-e891a02801ee) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 49e9b797-d7f2-4643-891e-b7f6a2c178b6 / got description=1e14100a-0781-4dcd-9f84-ac53f532aad3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7ecdddc4-ecc0-443f-b012-3a869bb8288d / got description=49e9b797-d7f2-4643-891e-b7f6a2c178b6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 240c35c8-dfb8-46a2-b5de-1065bbbe5e96 / got description=7ecdddc4-ecc0-443f-b012-3a869bb8288d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 594214fc-696d-4640-b617-2dd59abc8069 / got description=240c35c8-dfb8-46a2-b5de-1065bbbe5e96) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e1d78804-b54b-4547-beb1-315be0de9946 / got description=594214fc-696d-4640-b617-2dd59abc8069) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 60f22789-f2d4-4f0d-99b5-a186edc1694f / got description=e1d78804-b54b-4547-beb1-315be0de9946) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect da2893f3-6c50-47af-bd07-985ef06e95dc / got description=60f22789-f2d4-4f0d-99b5-a186edc1694f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b0af4199-b715-4743-9130-52350a1a145b / got description=da2893f3-6c50-47af-bd07-985ef06e95dc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 98e5eb35-f8de-422e-b0b3-a0a68fcba84a / got description=b0af4199-b715-4743-9130-52350a1a145b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:204 Running test_clean... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:207 test_clean: disable supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:211 test_clean: run the cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:221 test_clean: check all the suppliers have been cleaned... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:225 test_clean PASSED, restoring supplier 4... 
-----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 4f9d7ea0-7292-48b2-bb63-ca89da8a0ae1 / got description=98e5eb35-f8de-422e-b0b3-a0a68fcba84a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 550b98d4-b49b-4701-a840-b34a6b00afe9 / got description=4f9d7ea0-7292-48b2-bb63-ca89da8a0ae1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored. 
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 04ab8a4c-f6ab-4f6f-9ffc-599518f08b9b / got description=550b98d4-b49b-4701-a840-b34a6b00afe9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 9ec18a9c-3cd1-44f8-a0bd-29acce5f0fdf / got description=04ab8a4c-f6ab-4f6f-9ffc-599518f08b9b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e4194ca5-0e29-4cf3-b1cc-d91e77d95d1d / got description=9ec18a9c-3cd1-44f8-a0bd-29acce5f0fdf) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c4b959d0-365a-4bb3-b70a-96f546805644 / got description=e4194ca5-0e29-4cf3-b1cc-d91e77d95d1d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 97a62f8e-2a51-40be-8a65-837851672994 / got description=c4b959d0-365a-4bb3-b70a-96f546805644) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 2bec0852-01bb-4dfb-b00b-72a8ddf9ca5a / got description=97a62f8e-2a51-40be-8a65-837851672994) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a90ffa37-fd5c-4cbc-a1b4-9afb0eb44c72 / got description=2bec0852-01bb-4dfb-b00b-72a8ddf9ca5a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 759742e6-ac03-4ecf-bc91-46b0fe63f2b1 / got description=a90ffa37-fd5c-4cbc-a1b4-9afb0eb44c72) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 03529398-079e-4680-aecb-a5bf22f10853 / got description=759742e6-ac03-4ecf-bc91-46b0fe63f2b1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 512811a2-73e1-448a-aa76-0ac0b8b643b8 / got description=03529398-079e-4680-aecb-a5bf22f10853) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f5774967-4fb4-40e9-96ba-be34b109aa67 / got description=512811a2-73e1-448a-aa76-0ac0b8b643b8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5114fd43-6e95-44d5-855c-1898af25e279 / got description=f5774967-4fb4-40e9-96ba-be34b109aa67) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5114fd43-6e95-44d5-855c-1898af25e279 / got description=f5774967-4fb4-40e9-96ba-be34b109aa67) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
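The cleanAllRUV run in test_clean removes the retired supplier's replica ID (rid 4) from the RUV on every remaining supplier. The task can be launched over LDAP as an entry under cn=cleanallruv,cn=tasks,cn=config; a sketch using the suffix and rid from this test (the task cn is arbitrary):

    ldapadd -x -H ldap://localhost:39001 -D "cn=Directory Manager" -w password <<EOF
    dn: cn=clean 4,cn=cleanallruv,cn=tasks,cn=config
    objectClass: extensibleObject
    cn: clean 4
    replica-base-dn: dc=example,dc=com
    replica-id: 4
    replica-force-cleaning: no
    EOF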
Passed | suites/replication/cleanallruv_test.py::test_clean_restart | 131.57 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3104172d-aa26-459e-934a-d132ae940403 / got description=5114fd43-6e95-44d5-855c-1898af25e279) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 745bb347-0bb1-407f-bd39-af0d035e483f / got description=3104172d-aa26-459e-934a-d132ae940403) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 77bdba41-5528-4c26-b928-b498a14eabd5 / got description=745bb347-0bb1-407f-bd39-af0d035e483f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b2b2fb53-f663-43a9-9397-b746936506bf / got description=77bdba41-5528-4c26-b928-b498a14eabd5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 2b915e2c-2a65-4f47-bc10-14419962be24 / got description=b2b2fb53-f663-43a9-9397-b746936506bf) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect b3314840-541c-49a1-b623-8bd8c7edeaa3 / got description=2b915e2c-2a65-4f47-bc10-14419962be24) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect 8a230255-7519-4d56-a117-38b9b6e8e004 / got description=b3314840-541c-49a1-b623-8bd8c7edeaa3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4a491892-b2fe-4df6-8300-2b98d3764433 / got description=8a230255-7519-4d56-a117-38b9b6e8e004) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 96dc0c39-da03-4c11-8462-68469ffb0e50 / got description=4a491892-b2fe-4df6-8300-2b98d3764433) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2a1056f7-b881-42bc-a9ab-da7b5aa62e56 / got description=96dc0c39-da03-4c11-8462-68469ffb0e50) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2f6558f0-f996-46c5-876c-f5f9d3b4a9b6 / got description=2a1056f7-b881-42bc-a9ab-da7b5aa62e56) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7c450287-197f-4e1e-96c9-38ea175071e5 / got description=2f6558f0-f996-46c5-876c-f5f9d3b4a9b6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:256 Running test_clean_restart... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:259 test_clean: disable supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:267 test_clean: run the cleanAllRUV task... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:292 test_clean_restart: check all the suppliers have been cleaned... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:296 test_clean_restart PASSED, restoring supplier 4... -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect c09d90c4-84be-4f16-9bdc-7f9f5ace9c77 / got description=7c450287-197f-4e1e-96c9-38ea175071e5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 39331b02-c542-4ba5-8f85-a064328dba90 / got description=c09d90c4-84be-4f16-9bdc-7f9f5ace9c77) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored.
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect eb96ce84-a325-4b23-9ddd-d5a714840071 / got description=39331b02-c542-4ba5-8f85-a064328dba90) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 537786fd-24bb-454e-8ef1-c8d7b73a3af5 / got description=eb96ce84-a325-4b23-9ddd-d5a714840071) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 974d5945-2b33-4950-a52c-04bc59674473 / got description=537786fd-24bb-454e-8ef1-c8d7b73a3af5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e4d4cd3c-f052-47d8-8a52-7aad3027a002 / got description=974d5945-2b33-4950-a52c-04bc59674473) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5100924f-14a7-4233-8a67-939337d363fb / got description=e4d4cd3c-f052-47d8-8a52-7aad3027a002) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 43ebc2cb-313f-4ba7-b386-6c1e81d2d7a6 / got description=5100924f-14a7-4233-8a67-939337d363fb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2c299615-f531-4ac2-8796-ec7bd029b0cb / got description=43ebc2cb-313f-4ba7-b386-6c1e81d2d7a6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6e7ae3cc-ce73-44f1-820c-6a6d15727e74 / got description=2c299615-f531-4ac2-8796-ec7bd029b0cb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a404fcec-a0e5-4377-ab01-71cb9af179ab / got description=6e7ae3cc-ce73-44f1-820c-6a6d15727e74) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0a11ed78-2446-4885-9e91-0e2f8407bfdf / got description=a404fcec-a0e5-4377-ab01-71cb9af179ab) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 201e3618-e306-4222-a5aa-7b42e09e54d1 / got description=0a11ed78-2446-4885-9e91-0e2f8407bfdf) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 41d2eeb7-a28d-445b-b291-f93e707c385d / got description=201e3618-e306-4222-a5aa-7b42e09e54d1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
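Note: test_clean_restart drives the cleanAllRUV task named in the call log ("run the cleanAllRUV task", then "check all the suppliers have been cleaned"). A minimal sketch of creating such a task through lib389, modelled on this suite; m4rid is a placeholder for the retired supplier's replica id:

    from lib389.tasks import CleanAllRUVTask
    from lib389._constants import DEFAULT_SUFFIX

    # Ask the remaining suppliers to purge replica id m4rid from their RUVs.
    cruv_task = CleanAllRUVTask(supplier1)
    cruv_task.create(properties={
        'replica-id': m4rid,
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-force-cleaning': 'no',
    })
    cruv_task.wait()  # the restart variant bounces instances mid-task instead of waiting it out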
Passed | suites/replication/cleanallruv_test.py::test_clean_force | 100.09 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e0463539-1170-4a1f-b4d6-465f9537223e / got description=41d2eeb7-a28d-445b-b291-f93e707c385d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b623801a-5db0-4bc4-8201-fa51f936481c / got description=e0463539-1170-4a1f-b4d6-465f9537223e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e6703db8-055b-4972-b2a8-a2f4daef09e0 / got description=b623801a-5db0-4bc4-8201-fa51f936481c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4c1852a5-814f-4852-91bb-3903414eecb3 / got description=e6703db8-055b-4972-b2a8-a2f4daef09e0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 9c871514-de74-4546-8dd0-ce9dd23d6145 / got description=4c1852a5-814f-4852-91bb-3903414eecb3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 4ba5ddce-edb0-4d72-9184-2adc9401ce76 / got description=9c871514-de74-4546-8dd0-ce9dd23d6145) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect 55ccc3f8-a083-465f-903b-a9a12f147994 / got description=4ba5ddce-edb0-4d72-9184-2adc9401ce76) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9365739e-f263-4b24-8a39-b9e5a784636b / got description=55ccc3f8-a083-465f-903b-a9a12f147994) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 42cb159d-d326-4374-8bcd-dfffcaf50348 / got description=9365739e-f263-4b24-8a39-b9e5a784636b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c933e659-6639-4807-a6d3-2b4ed8afec8a / got description=42cb159d-d326-4374-8bcd-dfffcaf50348) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9349a03c-a8d5-4ea9-a1ae-6b69d6a7d310 / got description=c933e659-6639-4807-a6d3-2b4ed8afec8a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect cdfc1978-d925-4c82-9224-d9241e1d86a7 / got description=9349a03c-a8d5-4ea9-a1ae-6b69d6a7d310) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:323 Running test_clean_force... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_clean_force: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:341 test_clean: run the cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:351 test_clean_force: check all the suppliers have been cleaned... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:355 test_clean_force PASSED, restoring supplier 4... -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 74c2a5e9-90b7-4494-808c-111cf0989016 / got description=cdfc1978-d925-4c82-9224-d9241e1d86a7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2bc87b51-acd4-44fb-8134-c1a8eb44f2f2 / got description=74c2a5e9-90b7-4494-808c-111cf0989016) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored.
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4b49ca76-e8fe-4eb8-ab23-ee112c01e3b0 / got description=2bc87b51-acd4-44fb-8134-c1a8eb44f2f2) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect deedb601-242d-44dd-bd8e-29e4b530881a / got description=4b49ca76-e8fe-4eb8-ab23-ee112c01e3b0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 59f21901-f2f1-4be8-a4aa-69cca505f187 / got description=deedb601-242d-44dd-bd8e-29e4b530881a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f036c603-1a62-49b4-aa99-32a5563aeda1 / got description=59f21901-f2f1-4be8-a4aa-69cca505f187) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect cbf20ac8-4c45-4369-9617-68723001d96f / got description=f036c603-1a62-49b4-aa99-32a5563aeda1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 02622bc9-1a2a-47fd-b1bb-0297fd498b45 / got description=cbf20ac8-4c45-4369-9617-68723001d96f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 35cc22fc-ef3d-4070-8d23-e42d78c5c04a / got description=02622bc9-1a2a-47fd-b1bb-0297fd498b45) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8ba85906-9122-444e-a67d-ccf2f3482d53 / got description=35cc22fc-ef3d-4070-8d23-e42d78c5c04a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d1582f43-46cb-4c25-95f8-7f9893c5f547 / got description=8ba85906-9122-444e-a67d-ccf2f3482d53) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 54667190-d4d2-4148-aa94-83b05e0c5b9b / got description=d1582f43-46cb-4c25-95f8-7f9893c5f547) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect de8451fc-09bc-4050-a34d-b93684c605c3 / got description=54667190-d4d2-4148-aa94-83b05e0c5b9b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 3bb729f7-3090-4cc2-a7ac-27e52367d579 / got description=de8451fc-09bc-4050-a34d-b93684c605c3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
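Note: test_clean_force is the same scenario with 'replica-force-cleaning' set to 'yes', so the task does not wait for the retired supplier to reappear. The "check_ruvs for replica dc=example,dc=com:N (suffix:rid)" lines come from a helper that polls each remaining supplier's RUV until the cleaned replica id disappears; a sketch, under the assumption (taken from this suite) that the RUV object exposes the present replica ids via _rids:

    import time
    from lib389.replica import Replicas
    from lib389._constants import DEFAULT_SUFFIX

    def ruv_is_clean(inst, rid, tries=20):
        # Poll the replica's RUV until the retired rid is gone or we give up.
        replica = Replicas(inst).get(DEFAULT_SUFFIX)
        for _ in range(tries):
            if rid not in replica.get_ruv()._rids:  # _rids: ids still present in the RUV (assumption)
                return True
            time.sleep(1)
        return False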
Passed | suites/replication/cleanallruv_test.py::test_abort | 80.60 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ee3eb8c0-d9ef-4d96-84be-02e1e536b888 / got description=3bb729f7-3090-4cc2-a7ac-27e52367d579) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 3514aac9-c0f3-4737-89a6-9eca0fb3eecc / got description=ee3eb8c0-d9ef-4d96-84be-02e1e536b888) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 8b93d524-cd0e-4232-bb7c-66b2ee92863f / got description=3514aac9-c0f3-4737-89a6-9eca0fb3eecc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect aed33e14-a025-484d-9a53-0560b9dfa310 / got description=8b93d524-cd0e-4232-bb7c-66b2ee92863f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 11e869f3-7fbe-4946-942f-9018a82bc77a / got description=aed33e14-a025-484d-9a53-0560b9dfa310) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 333c738c-baba-474e-9f40-bc3f02ce4c32 / got description=11e869f3-7fbe-4946-942f-9018a82bc77a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect 3970242f-337f-432e-91f0-beea3a006236 / got description=333c738c-baba-474e-9f40-bc3f02ce4c32) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d9975b4d-a348-4956-8c39-09e77ac97ab8 / got description=3970242f-337f-432e-91f0-beea3a006236) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 53df0322-faf7-43c8-ae18-7e31aed86667 / got description=d9975b4d-a348-4956-8c39-09e77ac97ab8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 6a500243-b739-46f1-b4ed-fba7cec9409c / got description=53df0322-faf7-43c8-ae18-7e31aed86667) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5692e8d6-66d5-437e-8538-e902658d8bdc / got description=6a500243-b739-46f1-b4ed-fba7cec9409c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect da60901f-de92-477e-9667-8c658fa2dbe4 / got description=5692e8d6-66d5-437e-8538-e902658d8bdc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:378 Running test_abort... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:383 test_abort: stop supplier 2 to freeze the cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:387 test_abort: add the cleanAllRUV task... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:402 test_abort: check supplier 1 no longer has a cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:408 test_abort: start supplier 2 to begin the restore process... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:411 test_abort PASSED, restoring supplier 4... -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 81da99a6-f93d-421a-9d28-c4895f0e4ae8 / got description=da60901f-de92-477e-9667-8c658fa2dbe4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7672649a-c69a-4abc-a412-ca91a70149eb / got description=81da99a6-f93d-421a-9d28-c4895f0e4ae8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored.
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b411ab01-6aac-4ec1-b9a1-7d2bb199466f / got description=7672649a-c69a-4abc-a412-ca91a70149eb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect fa60c6ba-174b-43de-b2e4-92b6584b980a / got description=b411ab01-6aac-4ec1-b9a1-7d2bb199466f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect eb9705af-b3a4-4ac2-8f19-f5fe2b5cc31d / got description=fa60c6ba-174b-43de-b2e4-92b6584b980a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a8596dc1-c539-4ba2-9648-718181585eb4 / got description=eb9705af-b3a4-4ac2-8f19-f5fe2b5cc31d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 25c705b1-77e9-4196-a75a-7402b6d30f1f / got description=a8596dc1-c539-4ba2-9648-718181585eb4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 6f1d5894-2560-460e-9ea9-e61cf60af725 / got description=25c705b1-77e9-4196-a75a-7402b6d30f1f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b362da73-04c2-4b86-a959-7c4ad9bbb035 / got description=6f1d5894-2560-460e-9ea9-e61cf60af725) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 513dcdc3-7a9e-4b0a-b39c-d8ee7de61e76 / got description=b362da73-04c2-4b86-a959-7c4ad9bbb035) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 2fca9aba-7781-41cd-bbfb-af10159b56e9 / got description=513dcdc3-7a9e-4b0a-b39c-d8ee7de61e76) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7c248092-ff52-499c-988e-14903b413982 / got description=2fca9aba-7781-41cd-bbfb-af10159b56e9) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7c248092-ff52-499c-988e-14903b413982 / got description=2fca9aba-7781-41cd-bbfb-af10159b56e9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b80aa112-eb87-4f13-9108-9045bdf41e59 / got description=7c248092-ff52-499c-988e-14903b413982) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 441faffd-6f71-4fdd-b8b4-0c02719f2075 / got description=b80aa112-eb87-4f13-9108-9045bdf41e59) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
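Note: test_abort freezes a cleanAllRUV task by stopping supplier 2 (the task cannot complete until every supplier has been cleaned), then cancels it and verifies the task entry is gone from supplier 1. In lib389 the clean-task object can spawn the companion abort task; a sketch with the same placeholder names as above:

    # With supplier2 down, the clean task stalls; abort it from supplier1.
    cruv_task = CleanAllRUVTask(supplier1)
    cruv_task.create(properties={
        'replica-id': m4rid,
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-force-cleaning': 'no',
    })
    abort_task = cruv_task.abort()  # returns the abort-cleanallruv task (assumption)
    abort_task.wait()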
Passed | suites/replication/cleanallruv_test.py::test_abort_restart | 101.58 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 770c77a2-7dc8-45e3-bfa8-6cf2e4a41f1e / got description=441faffd-6f71-4fdd-b8b4-0c02719f2075) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 5e06caae-66bc-450f-b5cd-a77c4860dfc1 / got description=770c77a2-7dc8-45e3-bfa8-6cf2e4a41f1e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 031fddba-cdcc-47db-9f6d-b15797d11e96 / got description=5e06caae-66bc-450f-b5cd-a77c4860dfc1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c5629537-3d7b-4299-a756-34fabc42c246 / got description=031fddba-cdcc-47db-9f6d-b15797d11e96) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6b6bca8c-e416-4901-bddd-b7ac12105075 / got description=c5629537-3d7b-4299-a756-34fabc42c246) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 9fb910c2-1308-4f99-8798-6bd590336bba / got description=6b6bca8c-e416-4901-bddd-b7ac12105075) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect 4b050ccf-779c-4f4d-9910-7b3c8e18e72b / got description=9fb910c2-1308-4f99-8798-6bd590336bba) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8cb74627-6aee-419d-92bc-ad208ab85f49 / got description=4b050ccf-779c-4f4d-9910-7b3c8e18e72b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect d4622001-5652-4f7c-ba38-fde40ffaad2a / got description=8cb74627-6aee-419d-92bc-ad208ab85f49) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f084b169-ec0e-457e-a064-ad3faf7eb639 / got description=d4622001-5652-4f7c-ba38-fde40ffaad2a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 33946d73-7bf6-4626-9c97-4e8ede6e9868 / got description=f084b169-ec0e-457e-a064-ad3faf7eb639) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 42d4f281-9dd4-4ff9-ad0d-c1326bd532b5 / got description=33946d73-7bf6-4626-9c97-4e8ede6e9868) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:444 Running test_abort_restart... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:449 test_abort_restart: stop supplier 3 to freeze the cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:453 test_abort_restart: add the cleanAllRUV task... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:468 test_abort_restart: check supplier 1 no longer has a cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:490 test_abort_restart PASSED, restoring supplier 4... -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 4e0b24b6-5ec9-4fe7-b700-49a3a708b4a6 / got description=42d4f281-9dd4-4ff9-ad0d-c1326bd532b5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 61a672d7-53cb-4330-857a-fafdb868001b / got description=4e0b24b6-5ec9-4fe7-b700-49a3a708b4a6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored.
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f934699a-69c7-4093-89d0-33de3a5fef9a / got description=61a672d7-53cb-4330-857a-fafdb868001b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c9bdaa71-7b99-421e-976f-153c3ba16aef / got description=f934699a-69c7-4093-89d0-33de3a5fef9a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 2948c068-044c-48d4-a7f1-af93864eb0bb / got description=c9bdaa71-7b99-421e-976f-153c3ba16aef) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 863fd720-320b-43c1-b0e8-690012c74fe7 / got description=2948c068-044c-48d4-a7f1-af93864eb0bb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect b5ddf881-a6c1-403e-a44e-f287c3cb43dd / got description=863fd720-320b-43c1-b0e8-690012c74fe7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 62e0b7b1-8754-4464-92a6-50d8ea7d04c5 / got description=b5ddf881-a6c1-403e-a44e-f287c3cb43dd) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c6edbc79-0998-4973-b18f-a03e2e68c5cc / got description=62e0b7b1-8754-4464-92a6-50d8ea7d04c5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8f4b74ee-78f3-4336-b512-2319f14360d7 / got description=c6edbc79-0998-4973-b18f-a03e2e68c5cc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f1afa0df-0807-4ab3-b435-baa4d61284c9 / got description=8f4b74ee-78f3-4336-b512-2319f14360d7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 386a57bd-948a-489f-a23e-ac10aa0ac75b / got description=f1afa0df-0807-4ab3-b435-baa4d61284c9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2f82e4f9-bc66-4767-b543-6629d13c6098 / got description=386a57bd-948a-489f-a23e-ac10aa0ac75b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 29a5adc0-d93b-49a4-b827-e77ac59d4d12 / got description=2f82e4f9-bc66-4767-b543-6629d13c6098) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
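The test_abort_restart log above follows a fixed pattern: freeze a cleanAllRUV task by stopping one of its target suppliers, abort the task, restart the issuing supplier, and verify the aborted task is not replayed on startup. A minimal sketch of that flow (not the verbatim test), assuming lib389's four-supplier fixture exposes the instances as topology_m4.ms["supplierN"] and that CleanAllRUVTask behaves as in lib389.tasks; the function name and the rid value are illustrative:

from lib389._constants import DEFAULT_SUFFIX
from lib389.tasks import CleanAllRUVTask

def abort_restart_flow(topology_m4):
    m1 = topology_m4.ms["supplier1"]
    m3 = topology_m4.ms["supplier3"]

    # With supplier 3 down, the cleanAllRUV task cannot complete ("freeze" it).
    m3.stop()

    # Launch cleanAllRUV on supplier 1 for the removed replica id 4.
    cruv_task = CleanAllRUVTask(m1)
    cruv_task.create(properties={
        'replica-id': '4',
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-force-cleaning': 'no',
    })

    # Abort it, restart supplier 1, and check that the task did not come
    # back: an aborted task must not be resurrected after a restart.
    cruv_task.abort()
    m1.restart()
    assert not cruv_task.exists()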
Passed | suites/replication/cleanallruv_test.py::test_abort_certify | 109.69 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 88cfcec6-9158-46bd-9c8c-afca8363cb59 / got description=29a5adc0-d93b-49a4-b827-e77ac59d4d12) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 2a7ae96e-4a94-4bd7-896b-b9c05c6178b7 / got description=88cfcec6-9158-46bd-9c8c-afca8363cb59) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 964563db-9684-4ebc-a631-71750a28639d / got description=2a7ae96e-4a94-4bd7-896b-b9c05c6178b7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 30f0ee2d-e3d4-4d86-b0c3-3e157bb343e9 / got description=964563db-9684-4ebc-a631-71750a28639d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 1f4f6ce2-8d31-4bc9-80e7-9b34486cb413 / got description=30f0ee2d-e3d4-4d86-b0c3-3e157bb343e9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect ae5edfde-440f-4f50-9af1-27484c3e526c / got description=1f4f6ce2-8d31-4bc9-80e7-9b34486cb413) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect 2ecd3fb4-7a42-4e96-90df-aa191c840ddf / got description=ae5edfde-440f-4f50-9af1-27484c3e526c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 97550309-8681-4214-b585-4bdd38532aca / got description=2ecd3fb4-7a42-4e96-90df-aa191c840ddf) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 5a699b30-59bf-45b2-ba95-aaede7da94e4 / got description=97550309-8681-4214-b585-4bdd38532aca) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d2064a3d-5628-4e05-bc5d-f766aec5e6d7 / got description=5a699b30-59bf-45b2-ba95-aaede7da94e4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 84f7d10f-b088-4600-a0a1-ccc4d9fbab75 / got description=d2064a3d-5628-4e05-bc5d-f766aec5e6d7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect f1f74a79-a448-4746-b241-df3a9624790e / got description=84f7d10f-b088-4600-a0a1-ccc4d9fbab75) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:513 Running test_abort_certify... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_abort_certify: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:519 test_abort_certify: stop supplier 2 to freeze the cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:523 test_abort_certify: add the cleanAllRUV task... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:535 test_abort_certify: abort the cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:539 test_abort_certify... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:546 test_abort_certify: start supplier 2 to allow the abort task to finish... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:555 test_abort_certify: check supplier 1 no longer has a cleanAllRUV task... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:560 test_abort_certify PASSED, restoring supplier 4... -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e0d0ec79-1656-4ecf-9268-f6014add17b3 / got description=f1f74a79-a448-4746-b241-df3a9624790e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 2d0ace0f-91bb-4236-a860-aa5d190387d0 / got description=e0d0ec79-1656-4ecf-9268-f6014add17b3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored. 
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2d0e5a39-d7d7-48b6-9b5f-f77c3b2bf6f8 / got description=2d0ace0f-91bb-4236-a860-aa5d190387d0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect f2018fe2-4c7a-42b7-b460-950a953d11e9 / got description=2d0e5a39-d7d7-48b6-9b5f-f77c3b2bf6f8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect a683455e-25ba-490c-a1a6-3531801fb270 / got description=f2018fe2-4c7a-42b7-b460-950a953d11e9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ab2db98f-0d3d-4b63-b0c1-59f59e7832a1 / got description=a683455e-25ba-490c-a1a6-3531801fb270) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect fdc9c856-15e9-4f94-8bcf-576aa70fcc55 / got description=ab2db98f-0d3d-4b63-b0c1-59f59e7832a1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 73a77859-32e6-4d03-931c-a38e28693080 / got description=fdc9c856-15e9-4f94-8bcf-576aa70fcc55) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3cf8c230-9beb-4107-8cff-d8c317210a6f / got description=73a77859-32e6-4d03-931c-a38e28693080) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 076cf8fe-70ab-4ccc-9cdc-727564d1d55e / got description=3cf8c230-9beb-4107-8cff-d8c317210a6f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e9a2d8ec-ca8a-4e22-92c6-2fe1fed33994 / got description=076cf8fe-70ab-4ccc-9cdc-727564d1d55e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 388bee2c-3a4a-4465-be77-763c626d6c48 / got description=e9a2d8ec-ca8a-4e22-92c6-2fe1fed33994) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0afada23-5272-480f-889d-4120c98e394d / got description=388bee2c-3a4a-4465-be77-763c626d6c48) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect e75d118a-ed88-4083-a8f0-2640cb2171b7 / got description=0afada23-5272-480f-889d-4120c98e394d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
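test_abort_certify differs from the plain abort in one property: the abort is issued with replica-certify-all, so the abort task itself blocks until every replica acknowledges it, which is why the log shows supplier 2 being started again "to allow the abort task to finish". A sketch under the same lib389 assumptions, additionally assuming CleanAllRUVTask.abort() returns the abort task entry so it can be waited on:

from lib389._constants import DEFAULT_SUFFIX
from lib389.tasks import CleanAllRUVTask

def abort_certify_flow(topology_m4):
    m1 = topology_m4.ms["supplier1"]
    m2 = topology_m4.ms["supplier2"]

    m2.stop()  # freeze the cleanAllRUV task on an unreachable replica

    cruv_task = CleanAllRUVTask(m1)
    cruv_task.create(properties={
        'replica-id': '4',
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-force-cleaning': 'no',
    })

    # Certified abort: it must be acknowledged by all replicas, so it can
    # only complete once supplier 2 is back online.
    abort_task = cruv_task.abort(certify=True)
    m2.start()
    abort_task.wait()
    assert not cruv_task.exists()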
Passed | suites/replication/cleanallruv_test.py::test_stress_clean | 125.90 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1f0c7ee6-1720-473e-856b-e73eec96e60e / got description=e75d118a-ed88-4083-a8f0-2640cb2171b7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 43164fbb-01f7-47eb-82b7-3bc34d03b874 / got description=1f0c7ee6-1720-473e-856b-e73eec96e60e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect fe197fac-1e04-42bd-bcc9-515f85db38b5 / got description=43164fbb-01f7-47eb-82b7-3bc34d03b874) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3654ac31-7aeb-4077-821d-ed160c6ed340 / got description=fe197fac-1e04-42bd-bcc9-515f85db38b5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 642194de-aed7-4571-817c-975aed6f461f / got description=3654ac31-7aeb-4077-821d-ed160c6ed340) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 1829ed5d-5863-4961-bb40-82cbf0d76f6d / got description=642194de-aed7-4571-817c-975aed6f461f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect e705d831-873a-48b6-ae4f-364da9026ef1 / got description=1829ed5d-5863-4961-bb40-82cbf0d76f6d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8f92505c-6d12-41f1-bb41-fec2792305e0 / got description=e705d831-873a-48b6-ae4f-364da9026ef1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 7e0b4492-f349-4f43-ba57-081fd5dd8f5a / got description=8f92505c-6d12-41f1-bb41-fec2792305e0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7d6217aa-39d7-4d21-a41f-8e0e30b2e262 / got description=7e0b4492-f349-4f43-ba57-081fd5dd8f5a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0c5cc1f0-758a-4f6c-93f6-2f02299bd4c0 / got description=7d6217aa-39d7-4d21-a41f-8e0e30b2e262) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c7c701f0-0acb-41ce-acb5-c73a41c8ef17 / got description=0c5cc1f0-758a-4f6c-93f6-2f02299bd4c0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:585 Running test_stress_clean... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:586 test_stress_clean: put all the suppliers under load... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:603 test_stress_clean: allow some time for replication to get flowing... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:609 test_stress_clean: allow some time for supplier 4 to push changes out (60 seconds)... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_stress_clean: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:625 test_stress_clean: wait for all the updates to finish... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:632 test_stress_clean: check if all the replicas have been cleaned... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:636 test_stress_clean: PASSED, restoring supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:639 Sleep for 120 seconds to allow replication to complete... INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1755add1-4bb4-4680-8912-789010f926c6 / got description=c7c701f0-0acb-41ce-acb5-c73a41c8ef17) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6fc48c5b-99d5-412d-94ca-99838a7198f8 / got description=1755add1-4bb4-4680-8912-789010f926c6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 01dd0aa2-b097-47d6-b220-9b3f3607a9bc / got description=6fc48c5b-99d5-412d-94ca-99838a7198f8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect d1c8ea81-c563-482f-b93c-c0b540573928 / got description=01dd0aa2-b097-47d6-b220-9b3f3607a9bc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 840bbc55-0084-436f-accd-ad2b65f4bffa / got description=d1c8ea81-c563-482f-b93c-c0b540573928) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8bb33071-0bad-4c1d-b2ad-ee698460302d / got description=840bbc55-0084-436f-accd-ad2b65f4bffa) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 576ddc97-8416-4c2e-8ee2-9648612d2148 / got description=8bb33071-0bad-4c1d-b2ad-ee698460302d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b9d0bc0e-11a5-47a6-aad0-90143c4fcebd / got description=576ddc97-8416-4c2e-8ee2-9648612d2148) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored. 
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d95f1ab3-ffa1-426b-8afc-69016306d440 / got description=b9d0bc0e-11a5-47a6-aad0-90143c4fcebd) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7ab42fa3-3562-4527-9967-1bb6439f9459 / got description=d95f1ab3-ffa1-426b-8afc-69016306d440) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 7e645383-71a5-4911-9ad7-572b02d541d6 / got description=7ab42fa3-3562-4527-9967-1bb6439f9459) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7e0d4993-98e6-43a7-874d-6d2386d4948e / got description=7e645383-71a5-4911-9ad7-572b02d541d6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect f7c1883a-8b63-4f12-b5d7-29a834fa73b9 / got description=7e0d4993-98e6-43a7-874d-6d2386d4948e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 6ec9351c-35dd-42e1-84bd-b6819ad078db / got description=f7c1883a-8b63-4f12-b5d7-29a834fa73b9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d5d27a27-cc6b-413d-b9c5-a87c79bc580a / got description=6ec9351c-35dd-42e1-84bd-b6819ad078db) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO 
lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 41d87433-46d4-476f-be4d-6b34ed42ad1f / got description=d5d27a27-cc6b-413d-b9c5-a87c79bc580a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect ab243927-0b4e-4aea-b209-6848d5d91def / got description=41d87433-46d4-476f-be4d-6b34ed42ad1f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d0d6e437-033a-4845-9564-48c28d396dc4 / got description=ab243927-0b4e-4aea-b209-6848d5d91def) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect cd183af0-f0df-47cd-823f-1be2ea00241f / got description=d0d6e437-033a-4845-9564-48c28d396dc4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 738ed492-e2fe-4fde-9a78-bba021b9963f / got description=cd183af0-f0df-47cd-823f-1be2ea00241f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
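The "check_ruvs for replica dc=example,dc=com:N (suffix:rid)" lines in test_stress_clean come from a verification loop: after the cleanAllRUV task finishes under write load, the removed supplier's replica id must be gone from the RUV of every remaining supplier. A sketch of that check, assuming lib389's Replica.get_ruv() returns an RUV object whose _rids attribute lists the known replica ids (as strings, e.g. '4'); the helper name and polling interval are illustrative:

import time
from lib389._constants import DEFAULT_SUFFIX
from lib389.replica import Replicas

def rid_is_cleaned(inst, rid, timeout=100):
    """Poll one supplier until the given rid disappears from its RUV."""
    replica = Replicas(inst).get(DEFAULT_SUFFIX)
    for _ in range(timeout // 5):
        if rid not in replica.get_ruv()._rids:
            return True
        time.sleep(5)  # under load, the cleaned RUV may take a while to settle
    return False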
Passed | suites/replication/cleanallruv_test.py::test_multiple_tasks_with_force | 126.30 | |
-------------------------------Captured log setup------------------------------- DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:153 Wait a bit before the reset - it is required for the slow machines DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:155 -------------- BEGIN RESET of m4 ----------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0ae7c9dd-bee7-4d1c-9e94-c087ec8d26d1 / got description=738ed492-e2fe-4fde-9a78-bba021b9963f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect c45cd72c-3f75-4896-9984-55d551a57bff / got description=0ae7c9dd-bee7-4d1c-9e94-c087ec8d26d1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect e4019e79-1218-4bd6-8467-6275a7d89377 / got description=c45cd72c-3f75-4896-9984-55d551a57bff) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a21b2fbb-ae24-4277-9502-54d4ae9acf8d / got description=e4019e79-1218-4bd6-8467-6275a7d89377) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6202921f-6d85-4429-a0d4-87193967bd5e / got description=a21b2fbb-ae24-4277-9502-54d4ae9acf8d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 18c68185-3219-4b01-9ab1-743f8fb9e6a1 / got description=6202921f-6d85-4429-a0d4-87193967bd5e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT 
working (expect 7a3ea99e-2a76-4205-aca2-68e42e499ddb / got description=18c68185-3219-4b01-9ab1-743f8fb9e6a1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 67ff0ab2-ab33-413b-9870-0ca88e7a9567 / got description=7a3ea99e-2a76-4205-aca2-68e42e499ddb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect f31f2fd8-7a62-4d5c-84f8-ed289ffd32e9 / got description=67ff0ab2-ab33-413b-9870-0ca88e7a9567) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8d71388e-7e7d-44e8-9267-686a8ce7b2fb / got description=e3a218d9-a0cf-431d-9df9-1f77c230743b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 371f9abc-c7b1-4a0a-8c0e-caf07d8c7af4 / got description=8d71388e-7e7d-44e8-9267-686a8ce7b2fb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working DEBUG tests.suites.replication.cleanallruv_test:cleanallruv_test.py:181 -------------- FINISH RESET of m4 ----------------- -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:677 Running test_multiple_tasks_with_force... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:71 test_multiple_tasks_with_force: remove all the agreements to supplier 4... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:696 test_multiple_tasks_with_force: run the cleanAllRUV task with "force" on... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:705 test_multiple_tasks_with_force: run the cleanAllRUV task with "force" off... INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:724 test_multiple_tasks_with_force: check all the suppliers have been cleaned... 
INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:1 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:2 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:85 check_ruvs for replica dc=example,dc=com:3 (suffix:rid) INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:728 test_abort: check supplier 1 no longer has a cleanAllRUV task... -----------------------------Captured log teardown------------------------------ INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 82db3397-8b2d-47e4-85e5-ffade1f3941a / got description=371f9abc-c7b1-4a0a-8c0e-caf07d8c7af4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 85bf7493-54c6-40b7-a34f-78e956c2b443 / got description=82db3397-8b2d-47e4-85e5-ffade1f3941a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:148 Supplier 4 has been successfully restored. 
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 85cf5d78-c976-49af-82ed-53921a63f9ff / got description=85bf7493-54c6-40b7-a34f-78e956c2b443) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6dcb41be-b869-45f1-904d-e1cc784fe0ab / got description=85cf5d78-c976-49af-82ed-53921a63f9ff) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 0694d149-8dc7-4ac8-a307-206759b989ef / got description=6dcb41be-b869-45f1-904d-e1cc784fe0ab) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect f08f3519-62d1-4711-a69b-d88f83523469 / got description=0694d149-8dc7-4ac8-a307-206759b989ef) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 9f893d43-df36-4736-9666-0e9bb02488db / got description=f08f3519-62d1-4711-a69b-d88f83523469) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 7393d2f1-5fa3-4e56-bd17-0ed508724e5b / got description=9f893d43-df36-4736-9666-0e9bb02488db) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 99b166b2-c857-47ee-a4bb-c94231051bee / got description=7393d2f1-5fa3-4e56-bd17-0ed508724e5b) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 
99b166b2-c857-47ee-a4bb-c94231051bee / got description=7393d2f1-5fa3-4e56-bd17-0ed508724e5b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d87b797c-2f43-4a15-bc7f-5b32cfed69e8 / got description=99b166b2-c857-47ee-a4bb-c94231051bee) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 071f71e0-4281-41ac-8c25-c2f75634ef08 / got description=d87b797c-2f43-4a15-bc7f-5b32cfed69e8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e43d9679-7c40-48a2-968c-8a468c8f7487 / got description=071f71e0-4281-41ac-8c25-c2f75634ef08) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 41b6c9f7-76b4-4cb3-b254-32057d945d60 / got description=e43d9679-7c40-48a2-968c-8a468c8f7487) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 064f3c58-fc9e-4d74-8054-d5c46a4eecb7 / got description=41b6c9f7-76b4-4cb3-b254-32057d945d60) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working | |||
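test_multiple_tasks_with_force layers two cleanAllRUV tasks for the same replica id: one with replica-force-cleaning on (the task does not wait for unreachable replicas) and a second with it off. Both must complete, all remaining suppliers must end up cleaned, and no stale task entry may be left behind. A sketch under the same lib389 assumptions (each CleanAllRUVTask instance is assumed to get its own task entry; names and the rid are illustrative):

from lib389._constants import DEFAULT_SUFFIX
from lib389.tasks import CleanAllRUVTask

def run_overlapping_clean_tasks(m1):
    # Forced task: cleans the RUV even if some replicas cannot be reached.
    force_task = CleanAllRUVTask(m1)
    force_task.create(properties={
        'replica-id': '4',
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-force-cleaning': 'yes',
    })

    # Unforced task for the same rid, issued while the first is running.
    no_force_task = CleanAllRUVTask(m1)
    no_force_task.create(properties={
        'replica-id': '4',
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-force-cleaning': 'no',
    })

    # Both tasks must finish; afterwards neither entry should remain.
    force_task.wait()
    no_force_task.wait()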
Passed | suites/replication/cleanallruv_test.py::test_clean_shutdown_crash | 68.01 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0a8cd554-3231-49c5-815b-8e6986ced6f4 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8691a5fb-1804-4b5b-a294-bd4a2b9f9bb4 / got description=0a8cd554-3231-49c5-815b-8e6986ced6f4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:777 Enabling TLS INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:780 Creating replication dns INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:788 Changing auth type INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:809 Stopping supplier2 INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:812 Run the cleanAllRUV task INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:823 Check if supplier1 crashed INFO tests.suites.replication.cleanallruv_test:cleanallruv_test.py:826 Repeat | |||
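For reference, a sketch of the crash check this test performs: stop one supplier, run the cleanAllRUV task against its replica ID on the surviving supplier, and assert the survivor stays up. CleanAllRUVTask is lib389's wrapper for the task entry; the RID value '2' for supplier2 is an assumption of this sketch:

from lib389.tasks import CleanAllRUVTask
from lib389._constants import DEFAULT_SUFFIX

def check_clean_shutdown_crash(supplier1, supplier2, rid='2'):
    supplier2.stop()  # the replica being cleaned is offline
    task = CleanAllRUVTask(supplier1)
    task.create(properties={
        'replica-base-dn': DEFAULT_SUFFIX,
        'replica-id': rid,                # assumed RID of supplier2
        'replica-force-cleaning': 'yes',  # do not wait for the offline node
    })
    task.wait()
    # The point of the test: supplier1 must survive the task.
    assert supplier1.status()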
Passed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_add_modrdn | 51.53 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d720ab36-0b0a-4680-8b88-e5d23cb267d5 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 98668946-3598-4034-8494-acb9cf9556f9 / got description=d720ab36-0b0a-4680-8b88-e5d23cb267d5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6c76869b-2548-4afd-b327-f976c128eab4 / got description=98668946-3598-4034-8494-acb9cf9556f9) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6c76869b-2548-4afd-b327-f976c128eab4 / got description=98668946-3598-4034-8494-acb9cf9556f9) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6c76869b-2548-4afd-b327-f976c128eab4 / got description=98668946-3598-4034-8494-acb9cf9556f9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:210 Test create - modrdn INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0f5a46f0-cd94-4cfb-a3f1-10bfa5da9a0b / got description=6c76869b-2548-4afd-b327-f976c128eab4) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0f5a46f0-cd94-4cfb-a3f1-10bfa5da9a0b / got description=6c76869b-2548-4afd-b327-f976c128eab4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ca48474e-7824-4dc4-92e9-5de138f82f51 / got description=0f5a46f0-cd94-4cfb-a3f1-10bfa5da9a0b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
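A sketch of how an add/modrdn conflict like the one above is typically provoked in these suites: pause replication, apply colliding changes on both suppliers, then resume and let update resolution converge the entries. The pause_all_replicas()/resume_all_replicas() helpers and the uid below are assumptions of this sketch, not lifted from the test source:

from lib389.idm.user import UserAccounts
from lib389._constants import DEFAULT_SUFFIX

def provoke_add_modrdn_conflict(topo_m2):
    m1 = topo_m2.ms['supplier1']
    m2 = topo_m2.ms['supplier2']
    topo_m2.pause_all_replicas()
    # Same entry added independently on both sides ...
    UserAccounts(m1, DEFAULT_SUFFIX).create_test_user(uid=1000)
    UserAccounts(m2, DEFAULT_SUFFIX).create_test_user(uid=1000)
    # ... then renamed on one side only, so the adds and the modrdn collide.
    UserAccounts(m1, DEFAULT_SUFFIX).get('test_user_1000').rename('uid=conflict_user')
    topo_m2.resume_all_replicas()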
Passed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_complex_add_modify_modrdn_delete | 94.73 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 83685be0-84c1-4b74-8960-ae9ee4f84893 / got description=ca48474e-7824-4dc4-92e9-5de138f82f51) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 83685be0-84c1-4b74-8960-ae9ee4f84893 / got description=ca48474e-7824-4dc4-92e9-5de138f82f51) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 83685be0-84c1-4b74-8960-ae9ee4f84893 / got description=ca48474e-7824-4dc4-92e9-5de138f82f51) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 83685be0-84c1-4b74-8960-ae9ee4f84893 / got description=ca48474e-7824-4dc4-92e9-5de138f82f51) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:286 Test add-del on M1 and add on M2 INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:302 Test add-mod on M1 and add on M2 INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:318 Test add-modrdn on M1 and add on M2 INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:334 Test multiple add, modrdn INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:342 Add - del on both suppliers INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:349 Test modrdn - modrdn INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:354 Test modrdn - del INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4b43979a-eee3-4fc3-8e73-b1e95b3d256a / got description=83685be0-84c1-4b74-8960-ae9ee4f84893) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4b43979a-eee3-4fc3-8e73-b1e95b3d256a / got description=83685be0-84c1-4b74-8960-ae9ee4f84893) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4b43979a-eee3-4fc3-8e73-b1e95b3d256a / got description=83685be0-84c1-4b74-8960-ae9ee4f84893) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 4b43979a-eee3-4fc3-8e73-b1e95b3d256a / got description=83685be0-84c1-4b74-8960-ae9ee4f84893) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1a6cdbcc-c225-42aa-baeb-6c144fab6dec / got description=4b43979a-eee3-4fc3-8e73-b1e95b3d256a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_conflict_attribute_multi_valued | 22.55 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:834 Check foo1 is on M1
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:834 Check foo2 is on M1
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:840 Check foo1 is on M1
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:840 Check foo2 is on M1
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:845 Check M1.uid foo1 is also on M2
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:845 Check M1.uid foo2 is also on M2
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:849 Check M2.uid foo1 is also on M1
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:849 Check M2.uid foo2 is also on M1 | |||
Passed | suites/replication/conflict_resolve_test.py::TestTwoSuppliers::test_conflict_attribute_single_valued | 25.62 | |
-------------------------------Captured log call--------------------------------
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:922 Check foo1 is on M1
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:928 Check foo1 is on M2
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:933 Check M1.uid foo1 is also on M2
INFO tests.suites.replication.conflict_resolve_test:conflict_resolve_test.py:937 Check M2.uid foo1 is also on M1 | |||
Passed | suites/replication/multiple_changelogs_test.py::test_multiple_changelogs | 35.73 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 26cdee27-8c3a-4c89-82e7-92d91dc2c2ee / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 274caa55-9734-4fd2-b003-28e589daac06 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 | |||
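The supplier/consumer fixture logged above can be reproduced with lib389's topology helper; a sketch using the same roles and suffix the log shows:

from lib389.topologies import create_topology
from lib389.replica import ReplicationManager
from lib389._constants import ReplicaRole, DEFAULT_SUFFIX

# Build the same supplier/consumer pair the setup log shows.
topo = create_topology({ReplicaRole.SUPPLIER: 1, ReplicaRole.CONSUMER: 1})
supplier = topo.ms['supplier1']
consumer = topo.cs['consumer1']

# Same check the fixture performs: block until a change flows downstream.
repl = ReplicationManager(DEFAULT_SUFFIX)
repl.wait_for_replication(supplier, consumer)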
Passed | suites/replication/multiple_changelogs_test.py::test_multiple_changelogs_export_import | 14.65 | |
No log output captured. | |||
Passed | suites/replication/regression_i2_test.py::test_special_symbol_replica_agreement | 40.55 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=dc=example,dc=com INFO lib389:__init__.py:1710 Found entry dn: cn=replication manager,cn=config cn: bind dn pseudo user cn: replication manager objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {PBKDF2_SHA256}AAAIAM9Ao75Wxu2ut20MqyNxPmX0nmMPXShl5Uv0M6l0wEa51p9FfNnTq3p173dVwnHWSeY0FNU3irJTSt3b+sqJWG7kAeKH8fyJODR/xHDo6bBOFWImgClzHzaPB3h3v/cs34/HCst/57XFlLNjQqdCMtDwYCVUKiGo72zS0RngZX+Ypy5PSQOxY+ZoaskaVWacqx4xEX/8HZKQl79VUbVwQcRi19RglTHBkDhiiuhjL/SfRZ5hZmMD66ohPTgYAuHCicZKaHFQoYVO4zBsGY2SxtzCDvAitEpiZblW8/UJ7KnpoAW3yPVH8xW21tLUKVOR1ju73l4CdIqWPNIfoXLilkCKve+PXQCPL/D5p1SDjc+Nslkk/MjKY3mMTOAZyYIMK14ZknntnED1/6RsfWcIMcoKImwFd5JAnSn1DCuzri7+ INFO lib389:agreement.py:1168 Starting total init cn=-3meTo_ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38902,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config | |||
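The "Starting total init cn=..." line corresponds to kicking a full refresh over a replication agreement. A sketch with lib389's agreement objects, assuming a single agreement exists on the replica:

from lib389.agreement import Agreements

def total_init(supplier):
    agmt = Agreements(supplier).list()[0]  # assumes exactly one agreement
    agmt.begin_reinit()                    # sets nsds5BeginReplicaRefresh=start
    done, error = agmt.wait_reinit()       # polls the refresh status attributes
    assert done is True
    assert error is False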
Passed | suites/replication/regression_m2_test.py::test_double_delete | 27.57 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a4ab2671-2e6f-4574-9061-b40a4b10fc06 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect eb85532e-f49f-4627-874f-8e804ad941b4 / got description=a4ab2671-2e6f-4574-9061-b40a4b10fc06) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:163 Adding a test entry user -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:239 Deleting entry uid=testuser,ou=People,dc=example,dc=com from supplier1 INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:242 Deleting entry uid=testuser,ou=People,dc=example,dc=com from supplier2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0726e9a6-a9e9-4cd6-9b9f-479fbad01850 / got description=eb85532e-f49f-4627-874f-8e804ad941b4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 98ac6db2-0b4f-499f-b6b7-7942c48a292c / got description=0726e9a6-a9e9-4cd6-9b9f-479fbad01850) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_m2_test.py::test_repl_modrdn | 6.56 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:285 Add test entries - Add 3 OUs and 2 same users under 2 different OUs INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect d946d51b-b5d2-4915-99e7-3a343de11db0 / got description=98ac6db2-0b4f-499f-b6b7-7942c48a292c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 33deb6aa-f795-4616-93ca-516253fe590d / got description=d946d51b-b5d2-4915-99e7-3a343de11db0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:309 Stop Replication INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:312 Apply modrdn to M1 - move test user from OU A -> C INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:315 Apply modrdn on M2 - move test user from OU B -> C INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:318 Start Replication INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:321 Wait for sometime for repl to resume INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 62402f21-041d-4668-9f96-d53db285e56a / got description=33deb6aa-f795-4616-93ca-516253fe590d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b9e5cd84-8463-47c4-84cd-f9ea51eacd5c / got description=62402f21-041d-4668-9f96-d53db285e56a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:325 Check that there should be only one test entry under ou=C on both suppliers INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:332 Check that the replication is working fine both ways, M1 <-> M2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5fee9167-69dd-4fc7-8526-1b74c3301e6f / got description=b9e5cd84-8463-47c4-84cd-f9ea51eacd5c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 864ba432-ec1d-4e05-8dca-8eee02044bba / got description=5fee9167-69dd-4fc7-8526-1b74c3301e6f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
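The colliding modrdn step in this test boils down to two rename operations with different old parents and the same new parent; a sketch with raw python-ldap calls (the DNs are illustrative, not copied from the test):

# m1 and m2 are bound connections to the two suppliers, replication paused.
m1.rename_s('uid=tuser,ou=A,dc=example,dc=com', 'uid=tuser',
            newsuperior='ou=C,dc=example,dc=com')
m2.rename_s('uid=tuser,ou=B,dc=example,dc=com', 'uid=tuser',
            newsuperior='ou=C,dc=example,dc=com')
# After replication resumes, exactly one uid=tuser must remain under ou=C.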
Passed | suites/replication/regression_m2_test.py::test_password_repl_error | 12.66 | |
-------------------------------Captured log setup-------------------------------
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:163 Adding a test entry user
-------------------------------Captured log call--------------------------------
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:356 Clean the error log
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:359 Set replication loglevel
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:362 Modifying entry uid=testuser,ou=People,dc=example,dc=com - change userpassword on supplier 1
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 719391aa-ec55-4984-8356-08869e388ea8 / got description=864ba432-ec1d-4e05-8dca-8eee02044bba)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:369 Restart the servers to flush the logs
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:374 Check that password works on supplier 2
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:378 Check the error log for the error with uid=testuser,ou=People,dc=example,dc=com
INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:381 Set the default loglevel | |||
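A sketch of the password check itself: set userPassword on supplier1, wait for the change to replicate, then bind as the user against supplier2. That UserAccount.bind opens a fresh connection is the usual lib389 pattern, stated here as an assumption:

from lib389.idm.user import UserAccount

TEST_DN = 'uid=testuser,ou=People,dc=example,dc=com'  # from the log above

def check_password_repl(m1, m2, repl, new_password='newpassword'):
    UserAccount(m1, TEST_DN).set('userPassword', new_password)
    repl.wait_for_replication(m1, m2)
    # Raises ldap.INVALID_CREDENTIALS if the password did not replicate.
    conn = UserAccount(m2, TEST_DN).bind(new_password)
    conn.unbind_s()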
Passed | suites/replication/regression_m2_test.py::test_invalid_agmt | 2.10 | |
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b65d1aaa-caa0-404c-b3c1-8dec49a2ff5c / got description=719391aa-ec55-4984-8356-08869e388ea8)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 70c4cdbc-4096-4366-a969-817aef0b10fc / got description=b65d1aaa-caa0-404c-b3c1-8dec49a2ff5c)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_m2_test.py::test_fetch_bindDnGroup | 20.24 | |
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ba85f1cb-5261-41e5-8f17-dbbcd737fcbc / got description=70c4cdbc-4096-4366-a969-817aef0b10fc)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/regression_m2_test.py::test_plugin_bind_dn_tracking_and_replication | 0.59 | |
No log output captured. | |||
Passed | suites/replication/regression_m2_test.py::test_moving_entry_make_online_init_fail | 4.85 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:664 Generating DIT_0 INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:667 Created entry: ou=OU0, dc=example, dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:672 Created entry: ou=OU0, ou=OU0, dc=example, dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:213 Create password policy for subtree ou=OU0,dc=example,dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:681 Turning tuser0 into a tombstone entry INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:681 Turning tuser2 into a tombstone entry INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:681 Turning tuser4 into a tombstone entry INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:681 Turning tuser6 into a tombstone entry INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:681 Turning tuser8 into a tombstone entry INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:684 dc=example,dc=com => ou=OU0,dc=example,dc=com => ou=OU0,ou=OU0,dc=example,dc=com => 10 USERS INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:686 Generating DIT_1 INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:689 Created entry: ou=OU1,dc=example,dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:693 Created entry: ou=OU1, ou=OU1, dc=example, dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:213 Create password policy for subtree ou=OU1,dc=example,dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:697 Moving ou=OU0,ou=OU0,dc=example,dc=com to DIT_1 INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:700 Moving ou=OU0,dc=example,dc=com to DIT_1 INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:707 Moving USERS to ou=OU0,ou=OU0,ou=OU1,dc=example,dc=com INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:712 dc=example,dc=com => ou=OU1,dc=example,dc=com => ou=OU0,ou=OU1,dc=example,dc=com => ou=OU0,ou=OU0,ou=OU1,dc=example,dc=com => 10 USERS INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:714 Run Initialization. INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 991ea937-087d-4468-b872-88bfaf199c61 / got description=ba85f1cb-5261-41e5-8f17-dbbcd737fcbc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:723 m1entry count - 60 INFO tests.suites.replication.regression_m2_test:regression_m2_test.py:724 m2entry count - 60 | |||
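The "Turning tuserN into a tombstone entry" lines mean those users were deleted on the replica, which converts them to nsTombstone entries; tombstones only appear in search results when the filter asks for that objectclass explicitly. A sketch of the conversion and the check (the DN is illustrative):

import ldap

# supplier is a bound DirSrv connection; deleting the entry creates the tombstone.
supplier.delete_s('uid=tuser0,ou=OU0,ou=OU0,dc=example,dc=com')
tombstones = supplier.search_s(
    'dc=example,dc=com', ldap.SCOPE_SUBTREE,
    '(&(objectClass=nsTombstone)(uid=tuser0))', ['nsUniqueId'])
assert len(tombstones) == 1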
Passed | suites/replication/regression_m2_test.py::test_online_init_should_create_keepalive_entries | 19.00 | |
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-supplier1/ldif/norepl.ldif
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2466 SUCCESS: RUV from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is in sync | |||
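Keep-alive entries are per-replica ldapsubentry objects named "cn=repl keep alive <rid>" under the suffix; the test asserts that online init (re)creates them on the target. A sketch of that check, with the filter following the usual naming convention (stated here as an assumption):

import ldap

# m2 is a bound connection to the freshly initialized supplier.
keepalives = m2.search_s(
    'dc=example,dc=com', ldap.SCOPE_SUBTREE,
    '(&(objectClass=ldapsubentry)(cn=repl keep alive*))', ['cn'])
# One keep-alive entry per participating supplier RID is expected.
assert len(keepalives) >= 1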
Passed | suites/replication/regression_m2_test.py::test_online_reinit_may_hang | 47.79 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ee5d4749-a9c6-41a6-9338-9655c6d65eb6 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7486bbd2-5907-453c-93e7-685bac670b25 / got description=ee5d4749-a9c6-41a6-9338-9655c6d65eb6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-supplier1/ldif/supplier1.ldif -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ea5acb8b-62ec-4420-a9ed-ce220dc4a16d / got description=7486bbd2-5907-453c-93e7-685bac670b25) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect d297ece8-ceb9-443e-8d38-03c89be5b73c / got description=ea5acb8b-62ec-4420-a9ed-ce220dc4a16d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | suites/replication/regression_m2c2_test.py::test_ruv_url_not_added_if_different_uuid | 82.67 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39202, 'ldap-secureport': 63902, 'server-id': 'consumer2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7e34defc-3fd9-479f-8336-fde22fffb9b4 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9677c732-2f88-4fa0-aa68-445d1157b545 / got description=7e34defc-3fd9-479f-8336-fde22fffb9b4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 9103b67f-6f66-4096-9bbf-4f4578831442 / got description=9677c732-2f88-4fa0-aa68-445d1157b545) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:169 Joining consumer consumer2 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 6d832ca6-f265-41c5-ae7f-31f6d2c3ec58 / got description=9103b67f-6f66-4096-9bbf-4f4578831442) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 already exists INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer2 from supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is was created ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-supplier1/ldif/norepl.ldif -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 24b51ba7-ca7f-4581-9c51-f4d9c1ddd1ec / got description=6d832ca6-f265-41c5-ae7f-31f6d2c3ec58) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 8844fef3-466f-4abf-803f-69fc3f4ddfd4 / got description=6d832ca6-f265-41c5-ae7f-31f6d2c3ec58) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect a4e48bda-1922-4ab7-9772-18098c10226c / got description=24b51ba7-ca7f-4581-9c51-f4d9c1ddd1ec) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working | |||
Passed | suites/replication/regression_m2c2_test.py::test_csngen_state_not_updated_if_different_uuid | 43.19 | |
------------------------------Captured stderr call------------------------------
ldiffile: /var/lib/dirsrv/slapd-supplier1/ldif/norepl.ldif
-------------------------------Captured log call--------------------------------
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 0b3ae602-4042-494d-b2db-804cf136f5af / got description=a4e48bda-1922-4ab7-9772-18098c10226c)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is NOT working (expect 9d61b71c-d7ac-4b3e-ba1e-1f9648907a0c / got description=a4e48bda-1922-4ab7-9772-18098c10226c)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39202 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect de727d58-dddd-4994-ad6e-b3b327b50b5e / got description=0b3ae602-4042-494d-b2db-804cf136f5af)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working | |||
Passed | suites/replication/regression_m3_test.py::test_cleanallruv_repl | 156.37 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0ef52290-f0e5-4117-828b-9db89544409c / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4a21e81d-da21-4b0f-882e-29ca107379ce / got description=0ef52290-f0e5-4117-828b-9db89544409c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 32c54d48-6f86-4e81-b0dd-f6b6a5967401 / got description=4a21e81d-da21-4b0f-882e-29ca107379ce) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect b9c7b0f9-4822-403d-a6ce-a172915e302a / got description=32c54d48-6f86-4e81-b0dd-f6b6a5967401) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.regression_m3_test:regression_m3_test.py:65 Change the error log levels for all suppliers INFO tests.suites.replication.regression_m3_test:regression_m3_test.py:69 Get the replication agreements for all 3 suppliers INFO tests.suites.replication.regression_m3_test:regression_m3_test.py:74 Modify nsslapd-changelogmaxage=30 and nsslapd-changelogtrim-interval=5 for M1 and M2 INFO tests.suites.replication.regression_m3_test:regression_m3_test.py:97 Add test users to 3 suppliers INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:tasks.py:1475 cleanAllRUV task (task-06052021_004559) completed successfully INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config | |||
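The changelog trimming set-up logged above reduces to two attribute changes on the changelog entry. A minimal sketch, assuming the pre-2.x global changelog DN cn=changelog5,cn=config (newer servers keep a per-backend changelog) and a lib389 DirSrv handle from a topology fixture:

from lib389._mapped_object import DSLdapObject

def trim_changelog(supplier):
    # assumed DN; on 389-ds 2.x the changelog lives under the backend entry
    cl = DSLdapObject(supplier, 'cn=changelog5,cn=config')
    cl.replace('nsslapd-changelogmaxage', '30')        # drop changes older than 30s
    cl.replace('nsslapd-changelogtrim-interval', '5')  # run trimming every 5s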
Passed | suites/replication/repl_agmt_bootstrap_test.py::test_repl_agmt_bootstrap_credentials | 41.78 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bd4cb8be-c811-4180-9d70-6de3009ddbdd / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 149590da-06f0-471b-a060-08dfe5f90105 / got description=bd4cb8be-c811-4180-9d70-6de3009ddbdd) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
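The bootstrap-credentials feature exercised above gives an agreement a fallback identity to bind with when the normal replica bind fails. A hedged sketch; the nsds5ReplicaBootstrap* attribute names are assumptions about the 389-ds agreement schema, and agmt stands for an existing lib389 agreement object:

def set_bootstrap_credentials(agmt):
    # agmt: lib389 DSLdapObject for a replication agreement (assumed handle)
    agmt.replace_many(
        ('nsds5ReplicaBootstrapBindDN', 'cn=replication manager,cn=config'),
        ('nsds5ReplicaBootstrapCredentials', 'bootstrap_password'),
        ('nsds5ReplicaBootstrapBindMethod', 'SIMPLE'),
        ('nsds5ReplicaBootstrapTransportInfo', 'LDAP'),
    )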
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaType--1-4-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 8.40 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5Flags--1-2-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.28 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaId-0-65536-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.13 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaPurgeDelay--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.13 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsDS5ReplicaBindDnGroupCheckInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.15 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaTombstonePurgeInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.16 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.16 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaReleaseTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.16 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaBackoffMin-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-3] | 0.15 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_add[nsds5ReplicaBackoffMax-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.13 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsDS5Flags--1-2-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.12 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaPurgeDelay--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.13 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsDS5ReplicaBindDnGroupCheckInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.39 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaTombstonePurgeInterval--2-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.13 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaProtocolTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.15 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaReleaseTimeout--1-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-1] | 0.16 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaBackoffMin-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-3] | 0.13 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_replica_num_modify[nsds5ReplicaBackoffMax-0-9223372036854775807-9999999999999999999999999999999999999999999999999999999999999999999-invalid-6] | 0.37 | |
No log output captured. | |||
Passed | suites/replication/replica_config_test.py::test_same_attr_yields_same_return_code | 2.61 | |
No log output captured. | |||
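Each parametrized ID above encodes an attribute name plus its probe values: a too-small value, a too-big value, a 67-digit overflow, a non-numeric string, and one valid value. A self-contained sketch of the same boundary idea; validate_num merely stands in for the server-side range check and is not the suite's actual code:

import pytest

def validate_num(value, lo=0, hi=9223372036854775807):
    # Mimic a bounded 64-bit integer check such as the replica config applies.
    try:
        n = int(value)
    except (TypeError, ValueError):
        return False
    return lo <= n <= hi

@pytest.mark.parametrize('bad', [-1, int('9' * 67), 'invalid'])
def test_rejects_out_of_range(bad):
    assert not validate_num(bad)

def test_accepts_valid():
    assert validate_num(1)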
Passed | suites/replication/ruvstore_test.py::test_ruv_entry_backup | 32.96 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 01e703d8-2cd8-420d-a4c2-df36b9336aff / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 898bd258-dc14-484f-8803-7f161128d7b5 / got description=01e703d8-2cd8-420d-a4c2-df36b9336aff) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-supplier1/ldif/supplier1.ldif -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:102 LDAP operations add, modify, modrdn and delete INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:58 Adding user to supplier1 INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:61 Modify RDN of user: uid=rep2lusr,ou=People,dc=example,dc=com INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:68 Deleting user: uid=ruvusr,ou=people,dc=example,dc=com INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:106 Stopping the server instance to run db2ldif task to create backup file INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:110 Starting the server after backup INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:113 Checking if backup file contains RUV and required attributes INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: objectClass INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsUniqueId INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsds50ruv INFO tests.suites.replication.ruvstore_test:ruvstore_test.py:51 Attribute found in RUV: nsruvReplicaLastModified | |||
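The backup verification above amounts to scanning the db2ldif output for the RUV tombstone's attributes. A stdlib-only sketch of the same check; the LDIF path is the one in the captured stderr:

REQUIRED_RUV_ATTRS = ('nsUniqueId', 'nsds50ruv', 'nsruvReplicaLastModified')

def ruv_attrs_present(ldif_path):
    with open(ldif_path) as f:
        text = f.read().lower()
    return all(attr.lower() + ':' in text for attr in REQUIRED_RUV_ATTRS)

# e.g. ruv_attrs_present('/var/lib/dirsrv/slapd-supplier1/ldif/supplier1.ldif')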
Passed | suites/replication/series_of_repl_bugs_test.py::test_deletions_are_not_replicated | 35.42 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 6c03e9c0-f8aa-4098-8ddd-20983b53406d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 54cd4e71-2066-42e7-a628-1a8e5d5ce413 / got description=6c03e9c0-f8aa-4098-8ddd-20983b53406d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 7a2a4766-0ce0-41c7-90e9-7ea779625963 / got description=54cd4e71-2066-42e7-a628-1a8e5d5ce413) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9e4dda0e-42b3-49e9-aeed-513a3b945988 / got description=7a2a4766-0ce0-41c7-90e9-7ea779625963) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
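The Retry/SUCCESS pairs that dominate these logs come from lib389's convergence check: it writes a fresh marker value on the source supplier, then polls the target until the marker arrives, logging a Retry for each poll that still sees the previous marker. A minimal sketch, assuming two DirSrv handles from a topology fixture:

from lib389.replica import ReplicationManager
from lib389._constants import DEFAULT_SUFFIX

def wait_until_converged(supplier1, supplier2):
    repl = ReplicationManager(DEFAULT_SUFFIX)
    # Raises if supplier2 never receives the marker written on supplier1.
    repl.test_replication(supplier1, supplier2)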
Passed | suites/replication/series_of_repl_bugs_test.py::test_error_20 | 1.11 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 15da6cb7-e5ad-489e-8c16-48a2d55b59e0 / got description=9e4dda0e-42b3-49e9-aeed-513a3b945988) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_segfaults | 0.10 | |
No log output captured. | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_adding_deleting | 0.13 | |
No log output captured. | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_deleting_twice | 2.15 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0822a89e-b7e4-4ee9-80bb-03ab1566d018 / got description=15da6cb7-e5ad-489e-8c16-48a2d55b59e0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c2aada50-99d7-41e3-83cf-04835076cccc / got description=0822a89e-b7e4-4ee9-80bb-03ab1566d018) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_rename_entry | 2.35 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 28a9a65e-e1f5-4665-a95e-b160c80e0f76 / got description=c2aada50-99d7-41e3-83cf-04835076cccc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1e7ab77e-0e21-4a41-9fea-9a3ce98de3ee / got description=28a9a65e-e1f5-4665-a95e-b160c80e0f76) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_userpassword_attribute | 3.15 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e7531f8a-90e2-4538-b6e2-2831d66d87e0 / got description=1e7ab77e-0e21-4a41-9fea-9a3ce98de3ee) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 50c3bb8e-095a-4c0c-bc7b-668d68dae1fb / got description=e7531f8a-90e2-4538-b6e2-2831d66d87e0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | suites/replication/series_of_repl_bugs_test.py::test_tombstone_modrdn | 3.59 | |
No log output captured. | |||
Passed | suites/replication/single_master_test.py::test_mail_attr_repl | 39.50 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect c81ea892-6a5e-408e-8005-fb4df5053da6 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.single_master_test:single_master_test.py:68 Check that replication is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect a88a8185-60c6-4dae-be1c-cace50d5371c / got description=c81ea892-6a5e-408e-8005-fb4df5053da6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO tests.suites.replication.single_master_test:single_master_test.py:84 Back up /var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db to /tmp/mail.db INFO tests.suites.replication.single_master_test:single_master_test.py:88 Remove 'mail' attr from supplier INFO tests.suites.replication.single_master_test:single_master_test.py:91 Wait for the replication to happen INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ea8b4eca-987b-469c-9cf3-7aa325112a09 / got description=a88a8185-60c6-4dae-be1c-cace50d5371c) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ea8b4eca-987b-469c-9cf3-7aa325112a09 / got description=a88a8185-60c6-4dae-be1c-cace50d5371c) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ea8b4eca-987b-469c-9cf3-7aa325112a09 / got description=a88a8185-60c6-4dae-be1c-cace50d5371c) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ea8b4eca-987b-469c-9cf3-7aa325112a09 / got description=a88a8185-60c6-4dae-be1c-cace50d5371c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO tests.suites.replication.single_master_test:single_master_test.py:95 Restore /tmp/mail.db to /var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db INFO tests.suites.replication.single_master_test:single_master_test.py:99 Make a search for mail attribute in attempt to crash server INFO tests.suites.replication.single_master_test:single_master_test.py:102 Make sure that server hasn't crashed INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ae739f2b-e74a-4380-89d0-772d317b5787 / got description=ea8b4eca-987b-469c-9cf3-7aa325112a09) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ae739f2b-e74a-4380-89d0-772d317b5787 / got description=ea8b4eca-987b-469c-9cf3-7aa325112a09) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ae739f2b-e74a-4380-89d0-772d317b5787 / got description=ea8b4eca-987b-469c-9cf3-7aa325112a09) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect ae739f2b-e74a-4380-89d0-772d317b5787 / got description=ea8b4eca-987b-469c-9cf3-7aa325112a09) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working | |||
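The crash probe above copies the consumer's mail attribute index aside, lets replication remove the attribute, then reinstates the stale index and searches on it. A stdlib sketch of that sequence; the paths are the ones the test reports, and consumer is an assumed lib389 DirSrv handle:

import shutil

DB = '/var/lib/dirsrv/slapd-consumer1/db/userRoot/mail.db'
BAK = '/tmp/mail.db'

def swap_index(consumer):
    consumer.stop()            # never copy a live database file
    shutil.copy2(DB, BAK)
    consumer.start()
    # ... remove the mail attr on the supplier, wait for replication ...
    consumer.stop()
    shutil.copy2(BAK, DB)      # put the stale index back
    consumer.start()           # a mail=... search must not crash slapd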
Passed | suites/replication/single_master_test.py::test_lastupdate_attr_before_init | 23.48 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38902, 'ldap-secureport': 63602, 'server-id': 'standalone2', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38901 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:38902 is was created | |||
Passed | suites/replication/tls_client_auth_repl_test.py::test_ssl_transport | 56.05 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect c0fdb89c-72e2-4b25-b528-07a171eb9729 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 37686a19-e9b8-4fa3-904d-e333b9765cd7 / got description=c0fdb89c-72e2-4b25-b528-07a171eb9729) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 42e438d5-93c8-40fe-aba5-04affa9fa7b5 / got description=37686a19-e9b8-4fa3-904d-e333b9765cd7) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect fda71e38-51d4-446b-bb4e-39748444e61e / got description=42e438d5-93c8-40fe-aba5-04affa9fa7b5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 1b2cc073-7586-40da-af07-99dc4a2b1f42 / got description=fda71e38-51d4-446b-bb4e-39748444e61e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 714b8e49-8237-4cb0-8774-c88dde7b53cb / got description=1b2cc073-7586-40da-af07-99dc4a2b1f42) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect fc56c8e9-efb8-43a6-acc7-44a845bf918c / got description=714b8e49-8237-4cb0-8774-c88dde7b53cb) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect ca175bab-5cf2-4767-8b15-221f3dfce006 / got description=fc56c8e9-efb8-43a6-acc7-44a845bf918c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 
86d6b1c9-f9d3-42a7-915b-4cc12df9b5d9 / got description=ca175bab-5cf2-4767-8b15-221f3dfce006) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 4b92df70-5c51-4195-aa51-51c9eb17cabc / got description=86d6b1c9-f9d3-42a7-915b-4cc12df9b5d9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 84b8c509-0803-41f8-8de5-dd609687c066 / got description=4b92df70-5c51-4195-aa51-51c9eb17cabc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 5c2629fb-44c8-42f2-bf7a-56ce2a51dcc3 / got description=84b8c509-0803-41f8-8de5-dd609687c066) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 94e1be14-e085-4fcb-bce9-8d9bb00b6cbd / got description=5c2629fb-44c8-42f2-bf7a-56ce2a51dcc3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working | |||
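Note how the convergence checks above switch from ldap:// on 39001/39002 to ldaps:// on 63701/63702 once the agreements move to the secure transport. A hedged sketch of that switch; the attribute names are standard 389-ds agreement schema, while the port value and handle are assumptions:

from lib389.agreement import Agreements

def use_ldaps(supplier1):
    for agmt in Agreements(supplier1).list():
        agmt.replace('nsDS5ReplicaTransportInfo', 'SSL')  # LDAPS transport
        agmt.replace('nsDS5ReplicaPort', '63702')         # peer's secure port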
Passed | suites/replication/tls_client_auth_repl_test.py::test_extract_pemfiles | 5.95 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:168 Check that nsslapd-extract-pemfiles is on INFO tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:172 Set nsslapd-extract-pemfiles = '{}' and check replication works) INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect f0b1e1e0-5cc0-4cee-ae16-ff524e6d7218 / got description=94e1be14-e085-4fcb-bce9-8d9bb00b6cbd) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect 7075fa7a-a6a2-49c9-bf35-a59f203cf630 / got description=f0b1e1e0-5cc0-4cee-ae16-ff524e6d7218) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working INFO tests.suites.replication.tls_client_auth_repl_test:tls_client_auth_repl_test.py:172 Set nsslapd-extract-pemfiles = '{}' and check replication works) INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 059045b1-76bc-4926-9626-b2ded038d43b / got description=7075fa7a-a6a2-49c9-bf35-a59f203cf630) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect a0c38353-3d1d-495b-b1b1-b7414ab62265 / got description=059045b1-76bc-4926-9626-b2ded038d43b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working | |||
Passed | suites/replication/tombstone_fixup_test.py::test_precise_tombstone_purging | 33.32 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:597 Export task export_06052021_005058 for file /var/lib/dirsrv/slapd-supplier1/ldif/export.ldif completed successfully INFO lib389.utils:tombstone_fixup_test.py:77 Import replication LDIF file... INFO lib389:tasks.py:525 Import task import_06052021_005102 for file /var/lib/dirsrv/slapd-supplier1/ldif/export.ldif completed successfully INFO lib389:tasks.py:982 tombstone fixup task fixupTombstone_06052021_005104 for backend userRoot completed successfully INFO lib389:tasks.py:982 tombstone fixup task fixupTombstone_06052021_005107 for backend userRoot completed successfully INFO lib389.utils:tombstone_fixup_test.py:116 Wait for tombstone purge interval to pass... INFO lib389.utils:tombstone_fixup_test.py:123 Wait for tombstone purge interval to pass again... | |||
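A hedged sketch of the fixup task the log above shows completing twice after the LDIF round-trip; the fixupTombstones name follows lib389's tasks module, and supplier is an assumed DirSrv handle:

from lib389._constants import TASK_WAIT

def fixup(supplier):
    # Rebuild tombstone bookkeeping for the backend and wait for completion.
    supplier.tasks.fixupTombstones(bename='userRoot', args={TASK_WAIT: True})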
Passed | suites/replication/tombstone_test.py::test_purge_success | 10.83 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- INFO Tombstone:tombstone.py:164 Reviving nsuniqueid=b1f24b90-c5b911eb-9f97997f-51d1e7e4,uid=testuser,ou=people,dc=example,dc=com -> uid=testuser,ou=people,dc=example,dc=com | |||
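A hedged sketch of the revive step logged above: locate the tombstone by its original uid and turn it back into a live entry (class and method names assumed from lib389's tombstone module):

from lib389.tombstone import Tombstones
from lib389._constants import DEFAULT_SUFFIX

def revive_testuser(supplier):
    ts = Tombstones(supplier, DEFAULT_SUFFIX).filter('(uid=testuser)')[0]
    ts.revive()  # nsuniqueid=...,uid=testuser,... -> uid=testuser,...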
Passed | suites/replication/wait_for_async_feature_test.py::test_not_int_value | 24.91 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5f90e4e0-0996-4666-8968-0cd46f0bc17a / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 85224977-b32a-49cf-b775-d9a146946b93 / got description=5f90e4e0-0996-4666-8968-0cd46f0bc17a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_multi_value | 0.08 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr0] | 0.08 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr1] | 0.33 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr2] | 0.07 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr3] | 0.08 | |
No log output captured. | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr0] | 21.18 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on supplier1 INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times DEBUG tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 10 <= 11 -----------------------------Captured log teardown------------------------------ INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:76 Clear the errors log in the end of the test case | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr1] | 36.23 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on supplier1 INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 0 to 2 times DEBUG tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 0 <= 1 <= 2 -----------------------------Captured log teardown------------------------------ INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:76 Clear the errors log in the end of the test case | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr2] | 40.71 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on supplier1 INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times DEBUG tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 5 <= 11 -----------------------------Captured log teardown------------------------------ INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:76 Clear the errors log in the end of the test case | |||
Passed | suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr3] | 44.82 | |
-------------------------------Captured log setup------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:63 Add 100 nested entries under replicated suffix on supplier1 INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:71 Delete created entries -------------------------------Captured log call-------------------------------- INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:170 Set Replication Debugging loglevel for the errorlog INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:180 Gather all sync attempts within Counter dict, group by timestamp INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:201 Take the most common timestamp and assert it has appeared in the range from 4 to 11 times DEBUG tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:204 4 <= 5 <= 11 -----------------------------Captured log teardown------------------------------ INFO tests.suites.replication.wait_for_async_feature_test:wait_for_async_feature_test.py:76 Clear the errors log in the end of the test case | |||
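The "gather all sync attempts within Counter dict, group by timestamp" step above is plain stdlib bookkeeping over the errors log. A sketch under two assumptions: each log line starts with a [timestamp], and a fixed marker phrase identifies a sync attempt:

import re
from collections import Counter

def most_common_sync_timestamp(errlog_lines, marker='acquired replica'):
    stamps = Counter()
    for line in errlog_lines:
        m = re.match(r'\[([^\]]+)\]', line)
        if m and marker in line:       # marker phrase is an assumption
            stamps[m.group(1)] += 1
    return stamps.most_common(1)[0] if stamps else None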
Passed | suites/resource_limits/fdlimits_test.py::test_fd_limits | 10.28 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.resource_limits.fdlimits_test:fdlimits_test.py:69 Test PASSED | |||
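A minimal sketch of the knob fdlimits_test exercises; nsslapd-maxdescriptors is the cn=config attribute for the server's file-descriptor ceiling, and standalone is an assumed DirSrv handle:

def raise_fd_limit(standalone, limit='65535'):
    standalone.config.set('nsslapd-maxdescriptors', limit)
    assert standalone.config.get_attr_val_utf8('nsslapd-maxdescriptors') == limit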
Passed | suites/retrocl/basic_test.py::test_retrocl_exclude_attr_add | 17.35 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/retrocl/basic_test.py::test_retrocl_exclude_attr_mod | 11.57 | |
No log output captured. | |||
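A hedged sketch of the exclusion these two tests cover: tell the Retro Changelog plugin to skip an attribute so changes to it never land under cn=changelog. The nsslapd-exclude-attrs name is an assumption about the plugin's config schema:

from lib389.plugins import RetroChangelogPlugin

def exclude_attr(standalone, attr='homePhone'):
    RetroChangelogPlugin(standalone).replace('nsslapd-exclude-attrs', attr)
    standalone.restart()  # plugin config is read at startup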
Passed | suites/rewriters/adfilter_test.py::test_adfilter_objectCategory | 13.91 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/rewriters/basic_test.py::test_rewriters_container | 8.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/rewriters/basic_test.py::test_foo_filter_rewriter | 6.97 | |
No log output captured. | |||
Passed | suites/roles/basic_test.py::test_filterrole | 9.91 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/roles/basic_test.py::test_managedrole | 0.25 | |
No log output captured. | |||
Passed | suites/roles/basic_test.py::test_nestedrole | 2.61 | |
No log output captured. | |||
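The three role flavours above (filtered, managed, nested) map onto lib389's idm.role module. A minimal sketch creating a filtered role, with class and property names assumed from that module:

from lib389.idm.role import FilteredRoles
from lib389._constants import DEFAULT_SUFFIX

def create_filtered_role(standalone):
    roles = FilteredRoles(standalone, DEFAULT_SUFFIX)
    return roles.create(properties={
        'cn': 'FilteredRoleExample',
        'nsRoleFilter': '(ou=sales)',  # membership is computed from this filter
    })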
Passed | suites/sasl/allowed_mechs_test.py::test_basic_feature | 44.78 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stdout call------------------------------ ['EXTERNAL', 'GSS-SPNEGO', 'GSSAPI', 'DIGEST-MD5', 'CRAM-MD5', 'PLAIN', 'LOGIN', 'ANONYMOUS'] -------------------------------Captured log call-------------------------------- INFO lib389:allowed_mechs_test.py:75 Test we have some of the default mechanisms INFO lib389:allowed_mechs_test.py:83 Edit mechanisms to allow just PLAIN INFO lib389:allowed_mechs_test.py:91 Restart server and make sure we still have correct allowed mechs INFO lib389:allowed_mechs_test.py:100 Edit mechanisms to allow just PLAIN and EXTERNAL INFO lib389:allowed_mechs_test.py:108 Edit mechanisms to allow just PLAIN and GSSAPI INFO lib389:allowed_mechs_test.py:126 Edit mechanisms to allow just PLAIN, GSSAPI, and ANONYMOUS INFO lib389:allowed_mechs_test.py:146 Edit mechanisms to allow just PLAIN and ANONYMOUS INFO lib389:allowed_mechs_test.py:165 Reset allowed mechanisms INFO lib389:allowed_mechs_test.py:169 Check that we have the original set of mechanisms INFO lib389:allowed_mechs_test.py:174 Check that we have the original set of mechanisms after a restart | |||
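The mechanism list in the stdout capture above is what the server advertises on its root DSE; a minimal python-ldap query for it might look like the sketch below (the URL/port is illustrative, matching the instance created in this setup).

```python
# Minimal sketch: read the advertised SASL mechanisms from the root DSE with
# python-ldap. An anonymous base-scope search of "" returns the root DSE.
import ldap

conn = ldap.initialize('ldap://localhost:38901')  # port assumed from the log
dn, attrs = conn.search_s('', ldap.SCOPE_BASE,
                          attrlist=['supportedSASLMechanisms'])[0]
print([m.decode() for m in attrs['supportedSASLMechanisms']])
# e.g. ['EXTERNAL', 'GSS-SPNEGO', 'GSSAPI', 'DIGEST-MD5', 'CRAM-MD5', ...]
```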
Passed | suites/sasl/allowed_mechs_test.py::test_config_set_few_mechs | 2.48 | |
-------------------------------Captured log call-------------------------------- INFO lib389:allowed_mechs_test.py:198 Set nsslapd-allowed-sasl-mechanisms to 'PLAIN GSSAPI' INFO lib389:allowed_mechs_test.py:201 Verify nsslapd-allowed-sasl-mechanisms has the values | |||
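The two log lines above correspond to a plain attribute replace on cn=config followed by a read-back; a minimal lib389 sketch (assuming an already-connected DirSrv instance named standalone) could be:

```python
# Sketch of the config change and verification the log records; 'standalone'
# is assumed to be a connected lib389 DirSrv instance.
standalone.config.replace('nsslapd-allowed-sasl-mechanisms', 'PLAIN GSSAPI')

# get_attr_val_utf8 returns the attribute value as a str for comparison.
assert standalone.config.get_attr_val_utf8(
    'nsslapd-allowed-sasl-mechanisms') == 'PLAIN GSSAPI'
```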
Passed | suites/sasl/plain_test.py::test_basic_feature | 18.58 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/sasl/regression_test.py::test_openldap_no_nss_crypto | 61.74 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 733cd30e-13b4-43e9-867d-c936a6733206 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect edd05ca2-e01f-4ab1-a29d-bbf6a1884795 / got description=733cd30e-13b4-43e9-867d-c936a6733206) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.suites.sasl.regression_test:regression_test.py:133 Ticket 47536 - Allow usage of OpenLDAP libraries that don't use NSS for crypto INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect e1e9d5fa-7f0e-4e29-8cbb-228ca9a95040 / got description=edd05ca2-e01f-4ab1-a29d-bbf6a1884795) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to supplier1 ###################### INFO tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to supplier2 ###################### INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect db268296-09bb-490b-8e3b-e8bd7e6f7592 / got description=e1e9d5fa-7f0e-4e29-8cbb-228ca9a95040) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect d5fbb98e-7891-48b9-be2b-1c4740ea301a / got description=db268296-09bb-490b-8e3b-e8bd7e6f7592) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working INFO tests.suites.sasl.regression_test:regression_test.py:146 ##### Searching for entries on supplier1... INFO tests.suites.sasl.regression_test:regression_test.py:150 ##### Searching for entries on supplier2... INFO tests.suites.sasl.regression_test:regression_test.py:92 ######################### Relocate PEM files on supplier1 ###################### INFO tests.suites.sasl.regression_test:regression_test.py:100 ##### restart supplier1 INFO tests.suites.sasl.regression_test:regression_test.py:47 ######################### Check PEM files (/dev/shm/MyCA, /dev/shm/MyServerCert1, /dev/shm/MyServerKey1) in /dev/shm ###################### INFO tests.suites.sasl.regression_test:regression_test.py:53 /dev/shm/MyCA.pem is successfully generated. INFO tests.suites.sasl.regression_test:regression_test.py:66 /dev/shm/MyServerCert1.pem is successfully generated. INFO tests.suites.sasl.regression_test:regression_test.py:79 /dev/shm/MyServerKey1.pem is successfully generated. 
INFO tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to supplier1 ###################### INFO tests.suites.sasl.regression_test:regression_test.py:35 ######################### Adding 5 entries to supplier2 ###################### INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is NOT working (expect 60278414-f25d-4a4b-9ade-46a27cb7ab0a / got description=d5fbb98e-7891-48b9-be2b-1c4740ea301a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect d21c964a-b936-4c82-b7e4-7651e0a60e2e / got description=60278414-f25d-4a4b-9ade-46a27cb7ab0a) INFO lib389.replica:replica.py:2498 Retry: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is NOT working (expect d21c964a-b936-4c82-b7e4-7651e0a60e2e / got description=60278414-f25d-4a4b-9ade-46a27cb7ab0a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702 to ldaps://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701 is working INFO tests.suites.sasl.regression_test:regression_test.py:162 ##### Searching for entries on supplier1... INFO tests.suites.sasl.regression_test:regression_test.py:166 ##### Searching for entries on supplier2... INFO lib389:tasks.py:597 Export task export_06052021_005746 for file /var/lib/dirsrv/slapd-supplier1/ldif/supplier1.ldif completed successfully INFO tests.suites.sasl.regression_test:regression_test.py:173 Ticket 47536 - PASSED | |||
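The "Check PEM files" steps above amount to verifying that each relocated file exists and is PEM-formatted; a minimal stand-in for that check (the paths come from the log, the assertion style is an assumption) is:

```python
# Minimal stand-in for the PEM checks logged above: each file must exist and
# start with a PEM header. The exact validation the test performs is assumed.
import os

def check_pem(path):
    assert os.path.isfile(path), f'{path} is missing'
    with open(path) as f:
        assert f.readline().startswith('-----BEGIN'), f'{path} is not PEM'

for name in ('MyCA', 'MyServerCert1', 'MyServerKey1'):
    check_pem(f'/dev/shm/{name}.pem')
```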
Passed | suites/schema/eduperson_test.py::test_account_locking | 10.29 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.schema.eduperson_test:eduperson_test.py:88 Test PASSED | |||
Passed | suites/schema/schema_reload_test.py::test_schema_reload_with_searches | 9.78 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:49 Test the searches still work as expected during schema reload tasks | |||
Passed | suites/schema/schema_reload_test.py::test_schema_operation | 4.29 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:94 Case 1: Test that the cases in the original schema are preserved. INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:122 Case 2: Duplicated schema definitions that differ only in case are not loaded. INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:152 Case 2: MOZILLAATTRIBUTE is not in the objectclasses list -- PASS INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:155 Case 2-1: Use the custom schema with mozillaattribute | |||
Passed | suites/schema/schema_reload_test.py::test_valid_schema | 2.18 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:184 Test schema-reload task with valid schema INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:187 Create valid schema file (99user.ldif)... INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:204 Run the schema-reload task... INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:211 Check cn=schema to verify the valid schema was added | |||
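The sequence logged above (write a 99user.ldif, run the schema-reload task, re-read cn=schema) can be sketched with lib389's task helper; the schema directory below is illustrative and standalone is an assumed, connected DirSrv instance.

```python
# Sketch of kicking the schema-reload task and waiting for it to finish,
# assuming lib389's Tasks helper; the schema directory path is illustrative.
from lib389.properties import TASK_WAIT

# Drop the valid definition into 99user.ldif under the instance schema dir
# first, then run the reload task against that directory.
standalone.tasks.schemaReload(
    schemadir='/etc/dirsrv/slapd-standalone1/schema',  # assumed location
    args={TASK_WAIT: True})
```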
Passed | suites/schema/schema_reload_test.py::test_invalid_schema | 4.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:235 Test schema-reload task with invalid schema INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:238 Create valid schema file (98user.ldif)... INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:254 Create invalid schema file (99user.ldif)... INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:272 Run the schema-reload task, it should fail... INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:279 Check cn=schema to verify the invalid schema was not added INFO tests.suites.schema.schema_reload_test:schema_reload_test.py:286 The invalid schema is not present on the server | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_one | 33.16 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 4459e0eb-0c53-492b-b9f7-2adb345e7eb5 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:182 test_schema_replication_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7ff99b5d5940> (supplier <lib389.DirSrv object at 0x7ff99c60fc70>, consumer <lib389.DirSrv object at 0x7ff99b5d5be0>) -------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - no error INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:222 test_schema_replication_one topology_m1c1 <lib389.topologies.TopologyMain object at 0x7ff99b5d5940> (supplier <lib389.DirSrv object at 0x7ff99c60fc70>, consumer <lib389.DirSrv object at 0x7ff99b5d5be0>) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive 0 (expected 1) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'1' (expected 2) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:233 test_schema_replication_one supplier_schema_csn=b'60bb0486000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:234 test_schema_replication_one consumer_schema_csn=b'60bb0486000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36] 389-Directory/2.0.5 B2021.156.0143 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95] localhost.localdomain:39001 (/etc/dirsrv/slapd-supplier1) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [198] [05/Jun/2021:00:58:25.105779881 -0400] - INFO - main - 389-Directory/2.0.5 B2021.156.0143 starting up DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [306] [05/Jun/2021:00:58:25.109785321 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [398] [05/Jun/2021:00:58:25.115129608 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [490] [05/Jun/2021:00:58:25.118733425 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [598] [05/Jun/2021:00:58:25.914694077 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048
rounds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [712] [05/Jun/2021:00:58:25.921475644 -0400] - INFO - bdb_config_upgrade_dse_info - create config entry from old config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [814] [05/Jun/2021:00:58:25.929393753 -0400] - NOTICE - bdb_start_autotune - found 7977308k physical memory DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [910] [05/Jun/2021:00:58:25.933604541 -0400] - NOTICE - bdb_start_autotune - found 7296880k available DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1017] [05/Jun/2021:00:58:25.937015910 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498581k DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1120] [05/Jun/2021:00:58:25.940806763 -0400] - NOTICE - bdb_start_autotune - total cache size: 408438169 B; DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1256] [05/Jun/2021:00:58:26.169129506 -0400] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1383] [05/Jun/2021:00:58:26.173544505 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-supplier1.socket for LDAPI requests DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1542] [05/Jun/2021:00:58:26.192482219 -0400] - INFO - postop_modify_config_dse - The change of nsslapd-securePort will not take effect until the server is restarted DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1659] [05/Jun/2021:00:58:26.693399079 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1837] [05/Jun/2021:00:58:27.406508693 -0400] - INFO - op_thread_cleanup - slapd shutting down - signaling operation threads - op stack size 2 max work q size 2 max work q stack size 2 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [1967] [05/Jun/2021:00:58:27.411650342 -0400] - INFO - slapd_daemon - slapd shutting down - closing down internal subsystems and plugins DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2070] [05/Jun/2021:00:58:27.417491981 -0400] - INFO - bdb_pre_close - Waiting for 5 database threads to stop DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2167] [05/Jun/2021:00:58:28.617630395 -0400] - INFO - bdb_pre_close - All database threads now stopped DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2278] [05/Jun/2021:00:58:28.640714797 -0400] - INFO - ldbm_back_instance_set_destructor - Set of instances destroyed DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2439] [05/Jun/2021:00:58:28.643299464 -0400] - INFO - connection_post_shutdown_cleanup - slapd shutting down - freed 2 work q stack objects - freed 2 op stack objects DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2509] 
[05/Jun/2021:00:58:28.645547321 -0400] - INFO - main - slapd stopped. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2611] [05/Jun/2021:00:58:29.902150079 -0400] - INFO - main - 389-Directory/2.0.5 B2021.156.0143 starting up DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2719] [05/Jun/2021:00:58:29.909760860 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2811] [05/Jun/2021:00:58:29.916149856 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [2903] [05/Jun/2021:00:58:29.918947479 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3011] [05/Jun/2021:00:58:30.681580175 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3128] [05/Jun/2021:00:58:30.685896455 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3230] [05/Jun/2021:00:58:30.691461756 -0400] - NOTICE - bdb_start_autotune - found 7977308k physical memory DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3326] [05/Jun/2021:00:58:30.693542331 -0400] - NOTICE - bdb_start_autotune - found 7296584k available DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3433] [05/Jun/2021:00:58:30.695627562 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498581k DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3563] [05/Jun/2021:00:58:30.698106642 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot entry cache (1 total): 1376256k DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3689] [05/Jun/2021:00:58:30.700419406 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot dn cache (1 total): 196608k DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3793] [05/Jun/2021:00:58:30.702844240 -0400] - NOTICE - bdb_start_autotune - total cache size: 1834501529 B; DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [3929] [05/Jun/2021:00:58:30.773102675 -0400] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4056] [05/Jun/2021:00:58:30.776071644 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-supplier1.socket for LDAPI requests DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4244] [05/Jun/2021:00:58:38.672295104 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4424] [05/Jun/2021:00:58:38.675381221 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4612] [05/Jun/2021:00:58:38.677865386 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4792] [05/Jun/2021:00:58:38.680315453 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [4962] [05/Jun/2021:00:58:39.640001824 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Beginning total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-64:39201)". DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5146] [05/Jun/2021:00:58:39.643377712 -0400] - NOTICE - NSMMReplicationPlugin - replica_subentry_check - Need to create replication keep alive entry <cn=repl keep alive 1,dc=example,dc=com> DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5291] [05/Jun/2021:00:58:39.645701466 -0400] - INFO - NSMMReplicationPlugin - replica_subentry_create - add dn: cn=repl keep alive 1,dc=example,dc=com DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5308] objectclass: top DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5334] objectclass: ldapsubentry DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5364] objectclass: extensibleObject DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5386] cn: repl keep alive 1 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5572] [05/Jun/2021:00:58:42.217370521 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Finished total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-64:39201)". Sent 16 entries. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5572] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5572 | |||
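The repeated _pattern_errorlog lines above show the test tailing the supplier's error log from a saved byte offset and remembering where it stopped; a rough re-creation of that helper (not the test's actual implementation) looks like:

```python
# Rough re-creation of the offset-based error-log scan shown in the log:
# start at the previous offset, search each line for a pattern, and return
# the new offset for the next call. Only the idea, not the test's code.
import re

def pattern_errorlog(path, pattern, start_offset=0):
    regex = re.compile(pattern)
    found = False
    with open(path) as f:
        f.seek(start_offset)          # resume where the last scan ended
        for line in f:
            if regex.search(line):
                found = True
        end_offset = f.tell()         # remember for the next invocation
    return found, end_offset
```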
Passed | suites/schema/schema_replication_test.py::test_schema_replication_two | 11.50 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - (ticket 47721 allows to learn missing def) INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'2' (expected 3) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'3' (expected 4) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:289 test_schema_replication_two supplier_schema_csn=b'60bb0492000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:290 test_schema_replication_two consumer_schema_csn=b'60bb0492000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5573 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5785] 05/Jun/2021:00:58:58.539731125 -0400] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-139-64:39201) must not be overwritten (set replication log for additional info) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5785 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_three | 9.44 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Extra OC Schema is pushed - no error INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'4' (expected 5) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'5' (expected 6) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:340 test_schema_replication_three supplier_schema_csn=b'60bb049b000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:341 test_schema_replication_three consumer_schema_csn=b'60bb049b000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5786 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5786] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5786 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_four | 9.45 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - no error INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'6' (expected 7) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'7' (expected 8) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:389 test_schema_replication_four supplier_schema_csn=b'60bb04a5000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:390 test_schema_replication_four consumer_schema_csn=b'60bb04a5000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5787 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5787] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 5787 | |||
Passed | suites/schema/schema_replication_test.py::test_schema_replication_five | 11.75 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - (fix for 47721) INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'8' (expected 9) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'9' (expected 10) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:452 test_schema_replication_five supplier_schema_csn=b'60bb04b1000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:453 test_schema_replication_five consumer_schema_csn=b'60bb04b1000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 5788 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [5893] /Jun/2021:00:59:28.950514630 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6015] [05/Jun/2021:00:59:28.964823002 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04ad0002:1622869165:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6136] [05/Jun/2021:00:59:28.968808384 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04b00000:1622869168:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6299] [05/Jun/2021:00:59:28.971217861 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04b0000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6479] [05/Jun/2021:00:59:28.973870947 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca2d000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6724] [05/Jun/2021:00:59:28.977103972 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [6899] [05/Jun/2021:00:59:28.979983154 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04b0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7037] [05/Jun/2021:00:59:28.982217648 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04b0000000010000 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7167] [05/Jun/2021:00:59:28.985405226 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7305] [05/Jun/2021:00:59:28.988497180 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7433] [05/Jun/2021:00:59:28.993103415 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7550] [05/Jun/2021:00:59:28.996162449 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7724] [05/Jun/2021:00:59:29.000899146 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [7906] [05/Jun/2021:00:59:29.004187020 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8077] [05/Jun/2021:00:59:29.007022091 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8199] [05/Jun/2021:00:59:29.009948719 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04b00001:1622869168:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8320] [05/Jun/2021:00:59:29.013701949 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04b10000:1622869169:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8487] [05/Jun/2021:00:59:29.017722881 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8668] [05/Jun/2021:00:59:29.020530954 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [8852] [05/Jun/2021:00:59:29.022928406 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04b0000000000000 / remotecsn:60bb04a5000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9016] [05/Jun/2021:00:59:29.148310808 -0400] - DEBUG - schema_oc_compare_strict - Attribute telexNumber is not required in 'consumerNewOCA' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9170] [05/Jun/2021:00:59:29.151447667 -0400] - DEBUG - schema_oc_superset_check - Remote consumerNewOCA schema objectclasses is a superset of the received one. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9334] [05/Jun/2021:00:59:29.156286949 -0400] - DEBUG - schema_oc_compare_strict - Attribute telexNumber is not required in 'consumerNewOCA' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9484] [05/Jun/2021:00:59:29.158829734 -0400] - DEBUG - schema_list_oc2learn - Add that unknown/extended objectclass consumerNewOCA (1.2.3.4.5.6.7.8.9.10.1) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [9770] [05/Jun/2021:00:59:29.161071742 -0400] - DEBUG - schema_oc_to_string - Replace (old[251]=( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' )) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10083] [05/Jun/2021:00:59:29.163412968 -0400] - DEBUG - supplier_get_new_definitions - supplier takes objectclass: ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'user defined' ) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10385] [05/Jun/2021:00:59:29.177402895 -0400] - DEBUG - modify_schema_prepare_mods - MOD[1] del (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' ) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10696] [05/Jun/2021:00:59:29.181134767 -0400] - DEBUG - modify_schema_prepare_mods - MOD[0] add (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'user defined' ) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10804] [05/Jun/2021:00:59:29.184205563 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [10927] [05/Jun/2021:00:59:29.190857728 -0400] - DEBUG - modify_schema_internal_mod - Successfully learn objectclasses definitions DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11140] [05/Jun/2021:00:59:29.193759579 -0400] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-139-64:39201) must not be overwritten (set replication log for additional info) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 11140 | |||
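The two consumerNewOCA definitions quoted in the log above differ only in the MUST list (telexNumber), which is exactly the definition the supplier "learns"; a toy parse that surfaces that difference is sketched below. Real servers use a full schema parser; the regex here is only illustrative.

```python
# Toy parse of the objectclass strings quoted in the log, extracting the
# MUST/MAY attribute sets so two definitions can be diffed. Illustrative only;
# actual schema parsing is far stricter than this regex.
import re

def oc_attr_sets(oc_def):
    sets = {}
    for kw in ('MUST', 'MAY'):
        m = re.search(kw + r'\s*\(([^)]*)\)', oc_def)
        sets[kw] = {a.strip() for a in m.group(1).split('$')} if m else set()
    return sets

old = ("( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' SUP person AUXILIARY "
       "MUST ( postalAddress $ preferredLocale ) MAY ( postalCode $ street ) )")
new = ("( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' SUP person AUXILIARY "
       "MUST ( postalAddress $ preferredLocale $ telexNumber ) "
       "MAY ( postalCode $ street ) )")

# The new definition adds telexNumber to MUST, matching the log's diff:
print(oc_attr_sets(new)['MUST'] - oc_attr_sets(old)['MUST'])  # {'telexNumber'}
```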
Passed | suites/schema/schema_replication_test.py::test_schema_replication_six | 9.63 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Same OC - extra MUST: Schema is pushed - no error INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'10' (expected 11) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'11' (expected 12) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:508 test_schema_replication_six supplier_schema_csn=b'60bb04ba000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:509 test_schema_replication_six consumer_schema_csn=b'60bb04ba000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 11141 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11277] 05/Jun/2021:00:59:29.196973692 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] schema definitions may have been learned DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11411] [05/Jun/2021:00:59:29.390874579 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04ae000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11595] [05/Jun/2021:00:59:29.394270703 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11710] [05/Jun/2021:00:59:29.581529225 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04b10001:1622869169:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [11890] [05/Jun/2021:00:59:29.586628257 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12045] [05/Jun/2021:00:59:29.589415460 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12257] [05/Jun/2021:00:59:29.591776225 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04ad000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12437] [05/Jun/2021:00:59:29.594108832 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12592] [05/Jun/2021:00:59:29.596949629 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12804] [05/Jun/2021:00:59:29.599779640 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b0000000010000 60bb04b0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [12965] [05/Jun/2021:00:59:29.602815554 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3255000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13191] [05/Jun/2021:00:59:29.605812330 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3255000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13485] [05/Jun/2021:00:59:29.608682145 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04b0000000010000) csnBuf (60bb04ad000000010000) csnConsumerMax (60bb04ad000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13598] [05/Jun/2021:00:59:29.611075261 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04ad000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13788] [05/Jun/2021:00:59:29.614273554 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04ad000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [13943] [05/Jun/2021:00:59:29.616941928 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14055] [05/Jun/2021:00:59:29.619846543 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14187] [05/Jun/2021:00:59:29.621983203 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14406] [05/Jun/2021:00:59:29.624109673 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04b0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14538] [05/Jun/2021:00:59:29.626327124 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14735] [05/Jun/2021:00:59:29.628877036 -0400] - DEBUG - 
NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14868] [05/Jun/2021:00:59:29.631045130 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [14998] [05/Jun/2021:00:59:29.633457893 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 19, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15131] [05/Jun/2021:00:59:29.635719019 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15444] [05/Jun/2021:00:59:29.638029852 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04b0000000010000) csnMax (60bb04b0000000010000) csnBuf (60bb04b0000000010000) csnConsumerMax (60bb04b0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15567] [05/Jun/2021:00:59:29.640340365 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15750] [05/Jun/2021:00:59:29.642839138 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15863] [05/Jun/2021:00:59:29.645447986 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 19 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [15996] [05/Jun/2021:00:59:29.647745352 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16105] [05/Jun/2021:00:59:29.652187850 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16376] [05/Jun/2021:00:59:29.654763359 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16539] [05/Jun/2021:00:59:29.661970961 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16708] [05/Jun/2021:00:59:29.664932056 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16881] [05/Jun/2021:00:59:29.667463659 -0400] - 
DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [16989] [05/Jun/2021:00:59:30.005808530 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17159] [05/Jun/2021:00:59:30.009411712 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17333] [05/Jun/2021:00:59:30.012001084 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17507] [05/Jun/2021:00:59:30.014565180 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17678] [05/Jun/2021:00:59:30.017250116 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [17852] [05/Jun/2021:00:59:30.021101044 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18016] [05/Jun/2021:00:59:30.112289493 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-64:39201): Protocol stopped after 0 seconds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18144] [05/Jun/2021:00:59:30.115598650 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18329] [05/Jun/2021:00:59:30.117992271 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b0000000010000 60bb04b0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18503] [05/Jun/2021:00:59:30.120210011 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18611] [05/Jun/2021:00:59:30.122797460 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18719] [05/Jun/2021:00:59:35.137839357 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [18888] [05/Jun/2021:00:59:35.153753510 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19062] [05/Jun/2021:00:59:35.157495145 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19236] [05/Jun/2021:00:59:35.161356016 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19407] [05/Jun/2021:00:59:35.164914487 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: start -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [19575] [05/Jun/2021:00:59:35.168324047 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Trying non-secure slapi_ldap_init_ext DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20101] [05/Jun/2021:00:59:35.171491978 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - binddn = cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUXlOakF4TVdGa1ppMDNNVEpsTTJSbA0KTUMwNFlqUTBZMlZqTWkwM09HUTVNbVF4WlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCREtKdE9GWGZvN2F1QmNXQmRmRHIxcg==}Y9/HSfwfe+8dvHcyCrkEEKCbalgDnZKakfnYqz6GBjQGfLP/l21cFRi96L3sQaBd DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20146] +6FG8ZXZVBnUykV//FZxFhUVYM8a5I6n4251ViEjItA= DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20320] [05/Jun/2021:00:59:35.175399884 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20442] [05/Jun/2021:00:59:35.179999751 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04b10001:1622869169:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20563] [05/Jun/2021:00:59:35.190396220 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04b70000:1622869175:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20730] [05/Jun/2021:00:59:35.195282436 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [20911] [05/Jun/2021:00:59:35.199166852 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21095] [05/Jun/2021:00:59:35.206058240 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04b1000000000000 / remotecsn:60bb04ae000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21229] [05/Jun/2021:00:59:35.403689879 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04b0000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21413] [05/Jun/2021:00:59:35.407516525 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21528] [05/Jun/2021:00:59:35.594217341 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04b70001:1622869175:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21708] [05/Jun/2021:00:59:35.603476385 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [21863] [05/Jun/2021:00:59:35.607151547 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22075] [05/Jun/2021:00:59:35.610618513 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b0000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22255] [05/Jun/2021:00:59:35.614317169 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22410] [05/Jun/2021:00:59:35.617743410 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22622] [05/Jun/2021:00:59:35.620751980 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b0000000010000 60bb04b0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [22916] [05/Jun/2021:00:59:35.625120110 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (60bb04b0000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04b0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [23039] [05/Jun/2021:00:59:35.630391805 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23310] [05/Jun/2021:00:59:35.633957684 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23458] [05/Jun/2021:00:59:35.638712636 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No changes to send DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23621] [05/Jun/2021:00:59:35.645168121 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23790] [05/Jun/2021:00:59:35.648651695 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [23963] [05/Jun/2021:00:59:35.652389663 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24085] [05/Jun/2021:00:59:37.164993580 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04b70001:1622869175:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24206] [05/Jun/2021:00:59:37.169395464 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04b90000:1622869177:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24369] [05/Jun/2021:00:59:37.172475370 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04b9000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24549] [05/Jun/2021:00:59:37.175379545 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca30000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24794] [05/Jun/2021:00:59:37.178452456 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [24969] [05/Jun/2021:00:59:37.181797168 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04b9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25107] [05/Jun/2021:00:59:37.184815450 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04b9000000010000 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25237] [05/Jun/2021:00:59:37.187614447 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25375] [05/Jun/2021:00:59:37.190378243 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25503] [05/Jun/2021:00:59:37.192907879 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25620] [05/Jun/2021:00:59:37.196433222 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25794] [05/Jun/2021:00:59:37.204522379 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [25976] [05/Jun/2021:00:59:37.208128906 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26147] [05/Jun/2021:00:59:37.211307378 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26314] [05/Jun/2021:00:59:37.215497255 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26495] [05/Jun/2021:00:59:37.218720220 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26679] [05/Jun/2021:00:59:37.222034864 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04b1000000000000 / remotecsn:60bb04b0000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26813] [05/Jun/2021:00:59:37.418827893 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04b1000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [26928] [05/Jun/2021:00:59:37.422758760 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04b90002:1622869177:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27108] [05/Jun/2021:00:59:37.426050482 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27263] [05/Jun/2021:00:59:37.430297792 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27475] [05/Jun/2021:00:59:37.433559687 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b0000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27655] [05/Jun/2021:00:59:37.437053591 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [27810] [05/Jun/2021:00:59:37.440801160 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28022] [05/Jun/2021:00:59:37.444172893 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b9000000010000 60bb04b9 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28183] [05/Jun/2021:00:59:37.447809266 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f326a000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28409] [05/Jun/2021:00:59:37.451397940 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f326a000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28703] [05/Jun/2021:00:59:37.454677806 -0400] - DEBUG - agmt="cn=201" 
(ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04b9000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04b0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [28816] [05/Jun/2021:00:59:37.458255416 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04b0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29006] [05/Jun/2021:00:59:37.461706395 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04b0000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29161] [05/Jun/2021:00:59:37.465110568 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29273] [05/Jun/2021:00:59:37.468309151 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29492] [05/Jun/2021:00:59:37.471324492 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04b9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29624] [05/Jun/2021:00:59:37.474120681 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29821] [05/Jun/2021:00:59:37.480116459 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [29953] [05/Jun/2021:00:59:37.486513248 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30266] [05/Jun/2021:00:59:37.490365928 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04b9000000010000) csnMax (60bb04b9000000010000) csnBuf (60bb04b9000000010000) csnConsumerMax (60bb04b9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30389] [05/Jun/2021:00:59:37.493926878 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30572] [05/Jun/2021:00:59:37.499371801 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30684] [05/Jun/2021:00:59:37.502907514 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 14 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30817] [05/Jun/2021:00:59:37.506225653 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [30947] [05/Jun/2021:00:59:37.509346830 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 14, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31080] [05/Jun/2021:00:59:37.512882170 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31213] [05/Jun/2021:00:59:37.517587914 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31346] [05/Jun/2021:00:59:37.523106374 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31479] [05/Jun/2021:00:59:37.530457395 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31612] [05/Jun/2021:00:59:37.542258522 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31745] [05/Jun/2021:00:59:37.561594260 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31878] [05/Jun/2021:00:59:37.597498459 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [31991] [05/Jun/2021:00:59:37.606362730 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 14 14 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32100] [05/Jun/2021:00:59:37.665832578 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32371] [05/Jun/2021:00:59:37.670406586 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32534] [05/Jun/2021:00:59:37.677461549 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32703] [05/Jun/2021:00:59:37.683639765 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32876] [05/Jun/2021:00:59:37.688556801 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [32984] [05/Jun/2021:00:59:38.470187718 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33092] [05/Jun/2021:00:59:38.539572760 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33214] [05/Jun/2021:00:59:38.552347462 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04b90002:1622869177:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33335] [05/Jun/2021:00:59:38.557331403 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04ba0000:1622869178:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33498] [05/Jun/2021:00:59:38.563619823 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04ba000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33678] [05/Jun/2021:00:59:38.569788468 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca39000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [33923] [05/Jun/2021:00:59:38.574149440 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34098] [05/Jun/2021:00:59:38.577522272 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04ba000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34236] [05/Jun/2021:00:59:38.581049726 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34366] [05/Jun/2021:00:59:38.584277077 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34504] [05/Jun/2021:00:59:38.587412942 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34632] [05/Jun/2021:00:59:38.590844627 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34749] [05/Jun/2021:00:59:38.594690662 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [34923] [05/Jun/2021:00:59:38.602161402 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35105] [05/Jun/2021:00:59:38.606383768 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35276] [05/Jun/2021:00:59:38.609934975 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35443] [05/Jun/2021:00:59:38.614573393 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35624] [05/Jun/2021:00:59:38.618942500 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35808] [05/Jun/2021:00:59:38.622371876 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04ba000000000000 / remotecsn:60bb04b1000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [35942] [05/Jun/2021:00:59:38.813313581 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04b1000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36126] [05/Jun/2021:00:59:38.818543178 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36241] [05/Jun/2021:00:59:39.011071420 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04ba0002:1622869178:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36363] [05/Jun/2021:00:59:39.016036800 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04ba0002:1622869178:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36484] [05/Jun/2021:00:59:39.019509499 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04bb0000:1622869179:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36664] [05/Jun/2021:00:59:39.022927264 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [36819] [05/Jun/2021:00:59:39.026394436 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [37031] [05/Jun/2021:00:59:39.029721413 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04b9000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37211] [05/Jun/2021:00:59:39.033225340 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37366] [05/Jun/2021:00:59:39.036085638 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37578] [05/Jun/2021:00:59:39.038967721 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04ba000000010000 60bb04ba DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37739] [05/Jun/2021:00:59:39.042331890 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f326a000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [37965] [05/Jun/2021:00:59:39.045222053 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f326a000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38259] [05/Jun/2021:00:59:39.048174837 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04ba000000010000) csnBuf (60bb04b9000000010000) csnConsumerMax (60bb04b9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38372] [05/Jun/2021:00:59:39.051394952 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04b9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38562] [05/Jun/2021:00:59:39.054646630 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04b9000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38717] [05/Jun/2021:00:59:39.057832858 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [38936] [05/Jun/2021:00:59:39.061280882 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04ba000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39048] [05/Jun/2021:00:59:39.065390898 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39180] 
[05/Jun/2021:00:59:39.069101109 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39377] [05/Jun/2021:00:59:39.072164389 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39690] [05/Jun/2021:00:59:39.075200332 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04ba000000010000) csnMax (60bb04ba000000010000) csnBuf (60bb04ba000000010000) csnConsumerMax (60bb04ba000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39813] [05/Jun/2021:00:59:39.077844199 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [39996] [05/Jun/2021:00:59:39.080622253 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40108] [05/Jun/2021:00:59:39.083767408 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40241] [05/Jun/2021:00:59:39.087119999 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40371] [05/Jun/2021:00:59:39.090144014 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 21, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40504] [05/Jun/2021:00:59:39.093099547 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40637] [05/Jun/2021:00:59:39.096906896 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40770] [05/Jun/2021:00:59:39.101800042 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [40903] [05/Jun/2021:00:59:39.108983035 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41036] [05/Jun/2021:00:59:39.121185179 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41169] [05/Jun/2021:00:59:39.142803076 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41302] [05/Jun/2021:00:59:39.178512728 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41415] [05/Jun/2021:00:59:39.187284994 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 21 21 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41524] [05/Jun/2021:00:59:39.246467358 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41795] [05/Jun/2021:00:59:39.255455655 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [41958] [05/Jun/2021:00:59:39.262417104 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42127] [05/Jun/2021:00:59:39.265846249 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42300] [05/Jun/2021:00:59:39.269320856 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42408] [05/Jun/2021:00:59:39.606715256 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42578] [05/Jun/2021:00:59:39.610579681 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42752] [05/Jun/2021:00:59:39.613644291 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [42926] [05/Jun/2021:00:59:39.616646380 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43097] [05/Jun/2021:00:59:39.619410950 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43271] [05/Jun/2021:00:59:39.622584160 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43435] [05/Jun/2021:00:59:39.713846465 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-64:39201): Protocol stopped after 0 seconds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43563] [05/Jun/2021:00:59:39.719760642 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43748] [05/Jun/2021:00:59:39.723453994 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04ba000000010000 60bb04ba DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [43922] [05/Jun/2021:00:59:39.730997989 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44030] [05/Jun/2021:00:59:39.735471108 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44138] [05/Jun/2021:00:59:40.269693342 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44255] [05/Jun/2021:00:59:40.300847498 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44363] [05/Jun/2021:00:59:44.756799825 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44532] [05/Jun/2021:00:59:44.761760128 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44706] [05/Jun/2021:00:59:44.766297599 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [44880] [05/Jun/2021:00:59:44.770002853 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45051] [05/Jun/2021:00:59:44.773851835 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: start -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45219] [05/Jun/2021:00:59:44.777507959 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Trying non-secure slapi_ldap_init_ext DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45745] [05/Jun/2021:00:59:44.781030786 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - binddn = cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUXlOakF4TVdGa1ppMDNNVEpsTTJSbA0KTUMwNFlqUTBZMlZqTWkwM09HUTVNbVF4WlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCREtKdE9GWGZvN2F1QmNXQmRmRHIxcg==}Y9/HSfwfe+8dvHcyCrkEEKCbalgDnZKakfnYqz6GBjQGfLP/l21cFRi96L3sQaBd DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45790] +6FG8ZXZVBnUykV//FZxFhUVYM8a5I6n4251ViEjItA= DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [45964] [05/Jun/2021:00:59:44.785617631 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46086] [05/Jun/2021:00:59:44.791171224 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04bb0000:1622869179:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46207] [05/Jun/2021:00:59:44.794729812 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04c00000:1622869184:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46374] [05/Jun/2021:00:59:44.799457327 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46555] [05/Jun/2021:00:59:44.803441996 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46739] [05/Jun/2021:00:59:44.807081678 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04ba000000000000 / remotecsn:60bb04b1000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46873] [05/Jun/2021:00:59:45.007563800 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04ba000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [46988] [05/Jun/2021:00:59:45.011696397 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04c00001:1622869184:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47110] [05/Jun/2021:00:59:45.015787490 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04c00001:1622869184:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47231] [05/Jun/2021:00:59:45.019595550 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04c10000:1622869185:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47411] [05/Jun/2021:00:59:45.022903096 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47566] [05/Jun/2021:00:59:45.026685479 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47778] [05/Jun/2021:00:59:45.029713282 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04ba000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [47958] [05/Jun/2021:00:59:45.032721015 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48113] [05/Jun/2021:00:59:45.037598596 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48325] [05/Jun/2021:00:59:45.040822219 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04ba000000010000 60bb04ba DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48619] [05/Jun/2021:00:59:45.046198904 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax 
(60bb04ba000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04ba000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [48742] [05/Jun/2021:00:59:45.053566981 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49013] [05/Jun/2021:00:59:45.057182519 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49161] [05/Jun/2021:00:59:45.060525058 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No changes to send DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49324] [05/Jun/2021:00:59:45.078948186 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49493] [05/Jun/2021:00:59:45.081962250 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49666] [05/Jun/2021:00:59:45.084735823 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49788] [05/Jun/2021:00:59:46.775164671 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04c10000:1622869185:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [49909] [05/Jun/2021:00:59:46.782557832 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04c20000:1622869186:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50072] [05/Jun/2021:00:59:46.789749145 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04c2000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50252] [05/Jun/2021:00:59:46.794787248 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca3a000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50497] [05/Jun/2021:00:59:46.800796911 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50672] [05/Jun/2021:00:59:46.805134237 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04c2000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50810] 
[05/Jun/2021:00:59:46.810923015 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [50940] [05/Jun/2021:00:59:46.814621917 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51078] [05/Jun/2021:00:59:46.818706924 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51206] [05/Jun/2021:00:59:46.822316609 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51323] [05/Jun/2021:00:59:46.825608987 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51497] [05/Jun/2021:00:59:46.833244974 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51679] [05/Jun/2021:00:59:46.836897988 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [51850] [05/Jun/2021:00:59:46.840343584 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52017] [05/Jun/2021:00:59:46.845029769 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52198] [05/Jun/2021:00:59:46.848523342 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52313] [05/Jun/2021:00:59:46.851525430 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04c20002:1622869186:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52493] [05/Jun/2021:00:59:46.854529065 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52648] [05/Jun/2021:00:59:46.857438166 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [52860] [05/Jun/2021:00:59:46.860557482 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04ba000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53040] [05/Jun/2021:00:59:46.863734154 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53195] [05/Jun/2021:00:59:46.867013274 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53407] [05/Jun/2021:00:59:46.870460675 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04c2000000010000 60bb04c2 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53568] [05/Jun/2021:00:59:46.874207157 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3281000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [53794] [05/Jun/2021:00:59:46.877566673 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3281000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54088] [05/Jun/2021:00:59:46.880725338 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04c2000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04ba000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54201] [05/Jun/2021:00:59:46.884011648 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04ba000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [54391] [05/Jun/2021:00:59:46.887261796 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04ba000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54546] [05/Jun/2021:00:59:46.891212166 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54658] [05/Jun/2021:00:59:46.894470670 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [54877] [05/Jun/2021:00:59:46.897491435 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04c2000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55009] [05/Jun/2021:00:59:46.901341703 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55206] [05/Jun/2021:00:59:46.904719325 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55519] [05/Jun/2021:00:59:46.906900816 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04c2000000010000) csnMax (60bb04c2000000010000) csnBuf (60bb04c2000000010000) csnConsumerMax (60bb04c2000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55642] [05/Jun/2021:00:59:46.909044767 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55825] [05/Jun/2021:00:59:46.913254538 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [55937] [05/Jun/2021:00:59:46.916664788 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56070] [05/Jun/2021:00:59:46.918795049 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56200] [05/Jun/2021:00:59:46.921172633 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56333] [05/Jun/2021:00:59:46.923149068 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56466] 
[05/Jun/2021:00:59:46.926113250 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56599] [05/Jun/2021:00:59:46.930082625 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56732] [05/Jun/2021:00:59:46.937970120 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56865] [05/Jun/2021:00:59:46.948497094 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [56998] [05/Jun/2021:00:59:46.967178469 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57131] [05/Jun/2021:00:59:47.001645973 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57244] [05/Jun/2021:00:59:47.018978367 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57353] [05/Jun/2021:00:59:47.068470023 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57624] [05/Jun/2021:00:59:47.071538499 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57787] [05/Jun/2021:00:59:47.077134437 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [57956] [05/Jun/2021:00:59:47.079284499 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58129] [05/Jun/2021:00:59:47.081794912 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58129] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 58129 | |||
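The replication traces above are dominated by 20-hex-digit CSNs such as 60bb04c2000000010000. A CSN packs a 32-bit UNIX timestamp, a 16-bit sequence number, a 16-bit replica id and a 16-bit sub-sequence number, which is why the csngen lines pair 60bb04c2 with 1622869186. A minimal decoding sketch (parse_csn is a hypothetical helper, not part of lib389):

```python
from datetime import datetime, timezone

def parse_csn(csn: str) -> dict:
    """Decode a 389-ds CSN: <time:8><seq:4><replica-id:4><subseq:4> hex digits."""
    assert len(csn) == 20, "expected a 20 hex digit CSN"
    return {
        # 0x60bb04c2 == 1622869186, matching "gen state before 60bb04c20002:1622869186"
        "timestamp": datetime.fromtimestamp(int(csn[0:8], 16), tz=timezone.utc),
        "seqnum": int(csn[8:12], 16),
        "replica_id": int(csn[12:16], 16),
        "subseqnum": int(csn[16:20], 16),
    }

print(parse_csn("60bb04c2000000010000"))  # replica_id 1, as in "{replica 1 ldap://...}" above
```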
Passed | suites/schema/schema_replication_test.py::test_schema_replication_seven | 9.63 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed - no error INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'12' (expected 13) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'13' (expected 14) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:562 test_schema_replication_seven supplier_schema_csn=b'60bb04c4000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:563 test_schema_replication_seven consumer_schema_csn=b'60bb04c4000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 58130 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58237] [05/Jun/2021:00:59:48.142698639 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58345] [05/Jun/2021:00:59:48.203772350 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58467] [05/Jun/2021:00:59:48.221712676 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04c20002:1622869186:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58588] [05/Jun/2021:00:59:48.225400684 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04c40000:1622869188:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58751] [05/Jun/2021:00:59:48.228219167 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04c4000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [58931] [05/Jun/2021:00:59:48.230458378 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca42000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59176] [05/Jun/2021:00:59:48.233603966 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59351] [05/Jun/2021:00:59:48.236412901 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04c4000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59489] [05/Jun/2021:00:59:48.238724660 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59619] [05/Jun/2021:00:59:48.241027833 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59757] [05/Jun/2021:00:59:48.243350488 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [59885] [05/Jun/2021:00:59:48.245549961 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60002] [05/Jun/2021:00:59:48.247752110 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60176] [05/Jun/2021:00:59:48.252062566 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60358] [05/Jun/2021:00:59:48.254495646 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60529] [05/Jun/2021:00:59:48.260093566 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60696] [05/Jun/2021:00:59:48.263542841 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [60877] [05/Jun/2021:00:59:48.265772285 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61061] [05/Jun/2021:00:59:48.268006889 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04c4000000000000 / remotecsn:60bb04ba000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61226] [05/Jun/2021:00:59:48.395417973 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'supplierNewOCA' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61360] [05/Jun/2021:00:59:48.471092958 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04ba000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61544] [05/Jun/2021:00:59:48.474385367 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61659] [05/Jun/2021:00:59:48.658190232 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04c40002:1622869188:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61839] [05/Jun/2021:00:59:48.664153221 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [61994] [05/Jun/2021:00:59:48.666398224 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62206] [05/Jun/2021:00:59:48.668792320 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 
60bb047e000100010000 60bb04c2000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62386] [05/Jun/2021:00:59:48.671191450 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62541] [05/Jun/2021:00:59:48.673400986 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62753] [05/Jun/2021:00:59:48.675865084 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04c4000000010000 60bb04c4 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [62914] [05/Jun/2021:00:59:48.678175562 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3281000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63140] [05/Jun/2021:00:59:48.680269717 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3281000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63434] [05/Jun/2021:00:59:48.682432339 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04c4000000010000) csnBuf (60bb04c2000000010000) csnConsumerMax (60bb04c2000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63547] [05/Jun/2021:00:59:48.685342333 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04c2000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63737] [05/Jun/2021:00:59:48.687948735 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04c2000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [63892] [05/Jun/2021:00:59:48.690397624 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64004] [05/Jun/2021:00:59:48.692770680 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64136] [05/Jun/2021:00:59:48.695221916 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64355] [05/Jun/2021:00:59:48.697410381 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04c4000000010000) DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64487] [05/Jun/2021:00:59:48.699531382 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64684] [05/Jun/2021:00:59:48.701969834 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [64997] [05/Jun/2021:00:59:48.704607472 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04c4000000010000) csnMax (60bb04c4000000010000) csnBuf (60bb04c4000000010000) csnConsumerMax (60bb04c4000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65120] [05/Jun/2021:00:59:48.706917310 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65303] [05/Jun/2021:00:59:48.709170797 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65415] [05/Jun/2021:00:59:48.711362105 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65548] [05/Jun/2021:00:59:48.713481468 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65678] [05/Jun/2021:00:59:48.717043700 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 17, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65811] [05/Jun/2021:00:59:48.720035191 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [65944] [05/Jun/2021:00:59:48.724082495 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66077] [05/Jun/2021:00:59:48.728896393 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66210] [05/Jun/2021:00:59:48.736071336 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66343] [05/Jun/2021:00:59:48.747015247 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66476] [05/Jun/2021:00:59:48.766620679 -0400] - DEBUG - NSMMReplicationPlugin - 
repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66609] [05/Jun/2021:00:59:48.802159545 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66722] [05/Jun/2021:00:59:48.813660818 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 17 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [66831] [05/Jun/2021:00:59:48.869616917 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67102] [05/Jun/2021:00:59:48.873464408 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67265] [05/Jun/2021:00:59:48.879507929 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67434] [05/Jun/2021:00:59:48.882746331 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67607] [05/Jun/2021:00:59:48.885460376 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67715] [05/Jun/2021:00:59:49.256441523 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [67885] [05/Jun/2021:00:59:49.259610014 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68059] [05/Jun/2021:00:59:49.262172888 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68233] [05/Jun/2021:00:59:49.264927472 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68404] [05/Jun/2021:00:59:49.267397206 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68578] [05/Jun/2021:00:59:49.269890862 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68742] [05/Jun/2021:00:59:49.362324218 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-64:39201): Protocol stopped after 0 seconds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [68870] [05/Jun/2021:00:59:49.365943173 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69055] [05/Jun/2021:00:59:49.368570727 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04c4000000010000 60bb04c4 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69229] [05/Jun/2021:00:59:49.371224650 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69337] [05/Jun/2021:00:59:49.375407744 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69445] [05/Jun/2021:00:59:54.391044931 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69614] [05/Jun/2021:00:59:54.396632188 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69788] [05/Jun/2021:00:59:54.400341305 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [69962] [05/Jun/2021:00:59:54.403325966 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70133] [05/Jun/2021:00:59:54.406198930 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: start -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70301] [05/Jun/2021:00:59:54.408977785 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Trying non-secure slapi_ldap_init_ext DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70827] [05/Jun/2021:00:59:54.411543267 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - binddn = cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUXlOakF4TVdGa1ppMDNNVEpsTTJSbA0KTUMwNFlqUTBZMlZqTWkwM09HUTVNbVF4WlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCREtKdE9GWGZvN2F1QmNXQmRmRHIxcg==}Y9/HSfwfe+8dvHcyCrkEEKCbalgDnZKakfnYqz6GBjQGfLP/l21cFRi96L3sQaBd DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [70872] +6FG8ZXZVBnUykV//FZxFhUVYM8a5I6n4251ViEjItA= DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71046] [05/Jun/2021:00:59:54.414925463 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71168] [05/Jun/2021:00:59:54.418805951 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04c40002:1622869188:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71289] [05/Jun/2021:00:59:54.421727252 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04ca0000:1622869194:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71456] [05/Jun/2021:00:59:54.425817988 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71637] [05/Jun/2021:00:59:54.428587794 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71821] [05/Jun/2021:00:59:54.431358851 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04c4000000000000 / remotecsn:60bb04ba000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [71955] [05/Jun/2021:00:59:54.630261899 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04c4000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72070] [05/Jun/2021:00:59:54.640720409 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04ca0001:1622869194:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72250] [05/Jun/2021:00:59:54.644022928 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72405] [05/Jun/2021:00:59:54.647735122 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72617] [05/Jun/2021:00:59:54.650616299 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04c4000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72797] [05/Jun/2021:00:59:54.653298612 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [72952] [05/Jun/2021:00:59:54.656106341 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73164] [05/Jun/2021:00:59:54.658875800 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04c4000000010000 60bb04c4 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73458] [05/Jun/2021:00:59:54.661521531 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (60bb04c4000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04c4000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73581] [05/Jun/2021:00:59:54.663859196 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [73852] [05/Jun/2021:00:59:54.666324804 -0400] 
- DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74000] [05/Jun/2021:00:59:54.669296148 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No changes to send DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74163] [05/Jun/2021:00:59:54.676056143 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74332] [05/Jun/2021:00:59:54.679092722 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74505] [05/Jun/2021:00:59:54.681807948 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74627] [05/Jun/2021:00:59:56.407153239 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04ca0001:1622869194:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74748] [05/Jun/2021:00:59:56.437929141 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04cc0000:1622869196:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [74911] [05/Jun/2021:00:59:56.441290223 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04cc000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75091] [05/Jun/2021:00:59:56.446087169 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca44000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75336] [05/Jun/2021:00:59:56.449505787 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75511] [05/Jun/2021:00:59:56.452853431 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04cc000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75649] [05/Jun/2021:00:59:56.456460892 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75779] [05/Jun/2021:00:59:56.459253553 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [75917] 
[05/Jun/2021:00:59:56.461975428 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76045] [05/Jun/2021:00:59:56.464857266 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76162] [05/Jun/2021:00:59:56.467700343 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76336] [05/Jun/2021:00:59:56.471881055 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76518] [05/Jun/2021:00:59:56.475178278 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76689] [05/Jun/2021:00:59:56.477626795 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [76856] [05/Jun/2021:00:59:56.481479033 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77037] [05/Jun/2021:00:59:56.484504970 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77152] [05/Jun/2021:00:59:56.487196505 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04cc0002:1622869196:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77332] [05/Jun/2021:00:59:56.490832060 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77487] [05/Jun/2021:00:59:56.493506388 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77699] [05/Jun/2021:00:59:56.496186538 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04c4000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [77879] [05/Jun/2021:00:59:56.498764616 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78034] [05/Jun/2021:00:59:56.501822225 -0400] - DEBUG - 
NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78246] [05/Jun/2021:00:59:56.504313400 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04cc000000010000 60bb04cc DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78407] [05/Jun/2021:00:59:56.507087828 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3283800 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78633] [05/Jun/2021:00:59:56.509695003 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3283800 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [78927] [05/Jun/2021:00:59:56.512307924 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04cc000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04c4000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79040] [05/Jun/2021:00:59:56.514535124 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04c4000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79230] [05/Jun/2021:00:59:56.516947860 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04c4000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79385] [05/Jun/2021:00:59:56.519469330 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79497] [05/Jun/2021:00:59:56.522216790 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79716] [05/Jun/2021:00:59:56.525312474 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04cc000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [79848] [05/Jun/2021:00:59:56.528084994 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80045] [05/Jun/2021:00:59:56.530915504 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80177] [05/Jun/2021:00:59:56.535151258 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80490] [05/Jun/2021:00:59:56.538026237 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04cc000000010000) csnMax (60bb04cc000000010000) csnBuf (60bb04cc000000010000) csnConsumerMax (60bb04cc000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80613] [05/Jun/2021:00:59:56.540832821 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80796] [05/Jun/2021:00:59:56.543684845 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [80908] [05/Jun/2021:00:59:56.547097729 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81041] [05/Jun/2021:00:59:56.551070067 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81171] [05/Jun/2021:00:59:56.554492125 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81304] [05/Jun/2021:00:59:56.557683759 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81437] [05/Jun/2021:00:59:56.561520817 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81570] [05/Jun/2021:00:59:56.566406292 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81703] [05/Jun/2021:00:59:56.573302409 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81836] [05/Jun/2021:00:59:56.584669619 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [81969] [05/Jun/2021:00:59:56.603929620 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82102] [05/Jun/2021:00:59:56.639347504 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82215] [05/Jun/2021:00:59:56.651206190 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82324] [05/Jun/2021:00:59:56.706726138 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82595] [05/Jun/2021:00:59:56.714831690 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82758] [05/Jun/2021:00:59:56.721355166 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [82927] [05/Jun/2021:00:59:56.724266847 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83100] [05/Jun/2021:00:59:56.726974836 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83100] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 83100 | |||
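Each capture begins with "_pattern_errorlog: start at offset N" and ends with "end at offset M"; the next test resumes one byte later (58130 after 58129, 83101 after 83100), so every check scans only the portion of the server error log written during its own test. A rough sketch of that offset bookkeeping, reconstructed from the output above rather than copied from schema_replication_test.py:

```python
import re

def pattern_errorlog(fh, pattern, start_offset=0):
    """Scan an error log from start_offset; return (found, end_offset).

    The bracketed [NNNNN] numbers in the captured output are the file
    offset reached after each line is read.
    """
    fh.seek(start_offset)
    found = False
    while True:
        line = fh.readline()
        if not line:
            break
        if re.search(pattern, line):
            found = True
            break
    return found, fh.tell()

# e.g. resume where test_schema_replication_seven stopped:
# found, end = pattern_errorlog(open("errors"), r"ok to push the schema", 83101)
```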
Passed | suites/schema/schema_replication_test.py::test_schema_replication_eight | 11.78 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed (fix for 47721) INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'14' (expected 15) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'15' (expected 16) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:625 test_schema_replication_eight supplier_schema_csn=b'60bb04d0000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:626 test_schema_replication_eight consumer_schema_csn=b'60bb04d0000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 83101 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83208] [05/Jun/2021:00:59:59.942558026 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83316] [05/Jun/2021:01:00:00.009204457 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83438] [05/Jun/2021:01:00:00.021953551 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04cc0002:1622869196:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83559] [05/Jun/2021:01:00:00.025668241 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04d00000:1622869200:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83722] [05/Jun/2021:01:00:00.028390377 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04d0000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [83902] [05/Jun/2021:01:00:00.031396747 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca4c000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84147] [05/Jun/2021:01:00:00.034806850 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84322] [05/Jun/2021:01:00:00.038076206 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04d0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84460] [05/Jun/2021:01:00:00.041532577 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84590] [05/Jun/2021:01:00:00.044636818 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84728] [05/Jun/2021:01:00:00.047876360 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84856] [05/Jun/2021:01:00:00.050894329 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [84973] [05/Jun/2021:01:00:00.054061573 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85147] [05/Jun/2021:01:00:00.060410290 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85329] [05/Jun/2021:01:00:00.063751526 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85500] [05/Jun/2021:01:00:00.066767798 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85667] [05/Jun/2021:01:00:00.071360541 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [85848] [05/Jun/2021:01:00:00.074530469 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86032] [05/Jun/2021:01:00:00.077272809 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04d0000000000000 / remotecsn:60bb04c4000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86197] [05/Jun/2021:01:00:00.243808885 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'supplierNewOCC' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86362] [05/Jun/2021:01:00:00.255697672 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'consumerNewOCA' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86516] [05/Jun/2021:01:00:00.259118668 -0400] - DEBUG - schema_oc_superset_check - Remote consumerNewOCA schema objectclasses is a superset of the received one. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86681] [05/Jun/2021:01:00:00.264647317 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'supplierNewOCC' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86846] [05/Jun/2021:01:00:00.267612559 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'consumerNewOCA' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [86996] [05/Jun/2021:01:00:00.271507459 -0400] - DEBUG - schema_list_oc2learn - Add that unknown/extended objectclass consumerNewOCA (1.2.3.4.5.6.7.8.9.10.1) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87296] [05/Jun/2021:01:00:00.274572975 -0400] - DEBUG - schema_oc_to_string - Replace (old[265]=( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' )) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87625] [05/Jun/2021:01:00:00.278357563 -0400] - DEBUG - supplier_get_new_definitions - supplier takes objectclass: ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street $ postOfficeBox ) X-ORIGIN 'user defined' ) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [87941] [05/Jun/2021:01:00:00.298854880 -0400] - DEBUG - modify_schema_prepare_mods - MOD[1] del (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP 'person' AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street ) X-ORIGIN 'blahblahblah' ) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88268] [05/Jun/2021:01:00:00.308367855 -0400] - DEBUG - modify_schema_prepare_mods - MOD[0] add (objectclasses): ( 1.2.3.4.5.6.7.8.9.10.1 NAME 'consumerNewOCA' DESC 'To test ticket 47490' SUP person AUXILIARY MUST ( postalAddress $ preferredLocale $ telexNumber ) MAY ( postalCode $ street $ postOfficeBox ) X-ORIGIN 'user defined' ) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88376] [05/Jun/2021:01:00:00.311959261 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88499] [05/Jun/2021:01:00:00.317358821 -0400] - DEBUG - modify_schema_internal_mod - Successfully learn objectclasses definitions DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88712] [05/Jun/2021:01:00:00.320436356 -0400] - ERR - NSMMReplicationPlugin - update_consumer_schema - [S] Schema agmt="cn=201" (ci-vm-10-0-139-64:39201) must not be overwritten (set replication log for additional info) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 88712 | |||
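Every record above is emitted by the test's _pattern_errorlog helper, which resumes scanning the instance's error log at a saved byte offset, prints each line with the offset where it begins, and reports where it stopped (the "start at offset ..." / "end at offset ..." bookkeeping). A simplified sketch of that offset-in/offset-out contract, assuming the real helper in schema_replication_test.py follows the same shape:

    import re

    def pattern_errorlog(errlog_path, pattern, start_offset=0, log=print):
        """Scan the error log from start_offset for pattern; return
        (match or None, offset where the scan ended)."""
        regex = re.compile(pattern)
        match = None
        with open(errlog_path, "rb") as f:
            f.seek(start_offset)
            offset = start_offset
            while True:
                line = f.readline()
                if not line:
                    break
                text = line.decode(errors="replace").rstrip()
                log(f"_pattern_errorlog: [{offset}] {text}")
                if match is None:
                    match = regex.search(text)
                offset = f.tell()
        return match, offset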
Passed | suites/schema/schema_replication_test.py::test_schema_replication_nine | 14.24 | |
-------------------------------Captured log call-------------------------------- INFO lib389:schema_replication_test.py:41 ############################################### INFO lib389:schema_replication_test.py:42 ####### INFO lib389:schema_replication_test.py:43 ####### Same OC - extra MAY: Schema is pushed - no error INFO lib389:schema_replication_test.py:44 ####### INFO lib389:schema_replication_test.py:45 ################################################### DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'16' (expected 17) INFO lib389:agreement.py:1193 Pausing replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=201,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:152 trigger_update: receive b'17' (expected 18) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:683 test_schema_replication_nine supplier_schema_csn=b'60bb04d9000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:684 test_schema_replication_nine consumer_schema_csn=b'60bb04d9000000000000' DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:55 _pattern_errorlog: start at offset 88713 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [88849] [05/Jun/2021:01:00:00.324678621 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] schema definitions may have been learned DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89014] [05/Jun/2021:01:00:00.453379070 -0400] - DEBUG - schema_oc_compare_strict - Attribute postOfficeBox is not allowed in 'supplierNewOCC' of the remote consumer schema DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89148] [05/Jun/2021:01:00:00.535921929 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04cd000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89332] [05/Jun/2021:01:00:00.542976985 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89447] [05/Jun/2021:01:00:00.738560482 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04d00002:1622869200:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89627] [05/Jun/2021:01:00:00.743165946 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89782] [05/Jun/2021:01:00:00.746178186 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [89994] [05/Jun/2021:01:00:00.749532992 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04cc000000010000 00000000 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90174] [05/Jun/2021:01:00:00.753397117 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90329] [05/Jun/2021:01:00:00.756485035 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90541] [05/Jun/2021:01:00:00.759139013 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d0000000010000 60bb04d0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90702] [05/Jun/2021:01:00:00.761600013 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3283800 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [90928] [05/Jun/2021:01:00:00.764331847 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3283800 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91222] [05/Jun/2021:01:00:00.767093254 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04d0000000010000) csnBuf (60bb04cc000000010000) csnConsumerMax (60bb04cc000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91335] [05/Jun/2021:01:00:00.770016535 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04cc000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91525] [05/Jun/2021:01:00:00.773139799 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04cc000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91680] [05/Jun/2021:01:00:00.776163977 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [91792] [05/Jun/2021:01:00:00.779150355 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92011] [05/Jun/2021:01:00:00.782866779 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04d0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92143] [05/Jun/2021:01:00:00.797765506 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92340] 
[05/Jun/2021:01:00:00.801075452 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04d0000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92653] [05/Jun/2021:01:00:00.803870671 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04d0000000010000) csnMax (60bb04d0000000010000) csnBuf (60bb04d0000000010000) csnConsumerMax (60bb04d0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92776] [05/Jun/2021:01:00:00.806653752 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [92959] [05/Jun/2021:01:00:00.809924653 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93071] [05/Jun/2021:01:00:00.816099428 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93204] [05/Jun/2021:01:00:00.819548897 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93334] [05/Jun/2021:01:00:00.822713130 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 19, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93467] [05/Jun/2021:01:00:00.827672724 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93600] [05/Jun/2021:01:00:00.831979243 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93733] [05/Jun/2021:01:00:00.837146038 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93866] [05/Jun/2021:01:00:00.845519487 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [93999] [05/Jun/2021:01:00:00.857294726 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94132] [05/Jun/2021:01:00:00.877334174 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94265] [05/Jun/2021:01:00:00.913873469 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 19 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94378] [05/Jun/2021:01:00:00.919630683 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 19 19 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94487] [05/Jun/2021:01:00:00.981535275 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94758] [05/Jun/2021:01:00:00.985038301 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [94921] [05/Jun/2021:01:00:00.991785055 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95090] [05/Jun/2021:01:00:00.994645187 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95263] [05/Jun/2021:01:00:00.997516144 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95371] [05/Jun/2021:01:00:01.065155121 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95541] [05/Jun/2021:01:00:01.068675097 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95715] [05/Jun/2021:01:00:01.078335380 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [95889] [05/Jun/2021:01:00:01.081128525 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96060] [05/Jun/2021:01:00:01.084385613 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96234] [05/Jun/2021:01:00:01.087479397 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96398] [05/Jun/2021:01:00:01.178545901 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-64:39201): Protocol stopped after 0 seconds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96526] [05/Jun/2021:01:00:01.183246907 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96711] [05/Jun/2021:01:00:01.185942946 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d0000000010000 60bb04d0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96885] [05/Jun/2021:01:00:01.188700466 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [96993] [05/Jun/2021:01:00:01.191408921 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97101] [05/Jun/2021:01:00:06.208597661 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
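The repl5_inc_run records trace the incremental protocol's state machine: wait_for_changes -> ready_to_acquire_replica -> sending_updates -> wait_for_changes, with disable/stop events closing the connection. A toy model covering only the transitions visible in this log (not the server's implementation):

    # States and events as they appear in the repl5_inc_run records.
    TRANSITIONS = {
        ("start", "acquire"): "ready_to_acquire_replica",
        ("wait_for_changes", "change_available"): "ready_to_acquire_replica",
        ("ready_to_acquire_replica", "replica_acquired"): "sending_updates",
        ("sending_updates", "no_more_updates"): "wait_for_changes",
    }

    def step(state: str, event: str) -> str:
        # Unknown events leave the state unchanged (wait_for_changes ->
        # wait_for_changes, as logged repeatedly above).
        return TRANSITIONS.get((state, event), state)

    state = "wait_for_changes"
    for event in ("change_available", "replica_acquired", "no_more_updates"):
        state = step(state, event)
    print(state)  # wait_for_changes -- one full replication session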
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97270] [05/Jun/2021:01:00:06.216928245 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97444] [05/Jun/2021:01:00:06.220160948 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97618] [05/Jun/2021:01:00:06.223063156 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97789] [05/Jun/2021:01:00:06.226410420 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: start -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [97957] [05/Jun/2021:01:00:06.229561165 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Trying non-secure slapi_ldap_init_ext DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98483] [05/Jun/2021:01:00:06.233197595 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - binddn = cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUXlOakF4TVdGa1ppMDNNVEpsTTJSbA0KTUMwNFlqUTBZMlZqTWkwM09HUTVNbVF4WlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCREtKdE9GWGZvN2F1QmNXQmRmRHIxcg==}Y9/HSfwfe+8dvHcyCrkEEKCbalgDnZKakfnYqz6GBjQGfLP/l21cFRi96L3sQaBd DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98528] +6FG8ZXZVBnUykV//FZxFhUVYM8a5I6n4251ViEjItA= DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98702] [05/Jun/2021:01:00:06.237436357 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98824] [05/Jun/2021:01:00:06.241719389 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04d00002:1622869200:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [98945] [05/Jun/2021:01:00:06.244747132 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04d60000:1622869206:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99112] [05/Jun/2021:01:00:06.249392979 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99293] [05/Jun/2021:01:00:06.252403833 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99477] [05/Jun/2021:01:00:06.257914268 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04d0000000000000 / remotecsn:60bb04cd000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99611] [05/Jun/2021:01:00:06.463201533 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04d0000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99726] [05/Jun/2021:01:00:06.467616704 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04d60001:1622869206:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [99906] [05/Jun/2021:01:00:06.475117713 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100061] [05/Jun/2021:01:00:06.483093684 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100273] [05/Jun/2021:01:00:06.486404592 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d0000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100453] [05/Jun/2021:01:00:06.492208383 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100608] [05/Jun/2021:01:00:06.496406932 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [100820] [05/Jun/2021:01:00:06.499822650 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d0000000010000 60bb04d0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101114] [05/Jun/2021:01:00:06.502845817 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (60bb04d0000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04d0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101237] [05/Jun/2021:01:00:06.505568663 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101508] 
[05/Jun/2021:01:00:06.508308151 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101656] [05/Jun/2021:01:00:06.511336770 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No changes to send DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101819] [05/Jun/2021:01:00:06.522582884 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [101988] [05/Jun/2021:01:00:06.528388496 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102161] [05/Jun/2021:01:00:06.532034162 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102283] [05/Jun/2021:01:00:08.226824502 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04d60001:1622869206:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102404] [05/Jun/2021:01:00:08.230165761 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04d80000:1622869208:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102567] [05/Jun/2021:01:00:08.233177585 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04d8000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102747] [05/Jun/2021:01:00:08.235894979 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca50000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [102992] [05/Jun/2021:01:00:08.239018518 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103167] [05/Jun/2021:01:00:08.241891567 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04d8000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103305] [05/Jun/2021:01:00:08.244959033 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103435] [05/Jun/2021:01:00:08.247680934 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04d8000000010000 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103573] [05/Jun/2021:01:00:08.250467472 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103701] [05/Jun/2021:01:00:08.253247256 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103818] [05/Jun/2021:01:00:08.256189026 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [103992] [05/Jun/2021:01:00:08.263303151 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104174] [05/Jun/2021:01:00:08.269017063 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104345] [05/Jun/2021:01:00:08.272414045 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104512] [05/Jun/2021:01:00:08.277811426 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104693] [05/Jun/2021:01:00:08.285313416 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104808] [05/Jun/2021:01:00:08.289705715 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04d80002:1622869208:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [104988] [05/Jun/2021:01:00:08.293300854 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105143] [05/Jun/2021:01:00:08.296430656 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105355] [05/Jun/2021:01:00:08.299881759 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d0000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105535] [05/Jun/2021:01:00:08.302847870 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105690] [05/Jun/2021:01:00:08.306093052 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [105902] [05/Jun/2021:01:00:08.309310862 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d8000000010000 60bb04d8 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106063] [05/Jun/2021:01:00:08.312221161 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3360000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106289] [05/Jun/2021:01:00:08.315469895 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3360000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106583] [05/Jun/2021:01:00:08.318785157 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04d8000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04d0000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106696] [05/Jun/2021:01:00:08.321876086 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04d0000000010000 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [106886] [05/Jun/2021:01:00:08.325194681 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04d0000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107041] [05/Jun/2021:01:00:08.331642462 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107153] [05/Jun/2021:01:00:08.335402313 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107372] [05/Jun/2021:01:00:08.338549904 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04d8000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107504] [05/Jun/2021:01:00:08.341428892 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107701] [05/Jun/2021:01:00:08.344946565 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [107833] [05/Jun/2021:01:00:08.349723819 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108146] [05/Jun/2021:01:00:08.352642270 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04d8000000010000) csnMax (60bb04d8000000010000) csnBuf (60bb04d8000000010000) csnConsumerMax (60bb04d8000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108269] [05/Jun/2021:01:00:08.355433758 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108452] [05/Jun/2021:01:00:08.359135742 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108564] [05/Jun/2021:01:00:08.362086298 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108697] [05/Jun/2021:01:00:08.365059617 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108827] [05/Jun/2021:01:00:08.368156484 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 10, (null) DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [108960] [05/Jun/2021:01:00:08.371336325 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109093] [05/Jun/2021:01:00:08.375302837 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109226] [05/Jun/2021:01:00:08.380535320 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109359] [05/Jun/2021:01:00:08.387702048 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109492] [05/Jun/2021:01:00:08.399359945 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109625] [05/Jun/2021:01:00:08.418705657 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109758] [05/Jun/2021:01:00:08.455405770 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109871] [05/Jun/2021:01:00:08.465370027 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 10 10 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [109980] [05/Jun/2021:01:00:08.523301286 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110251] [05/Jun/2021:01:00:08.549825704 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110414] [05/Jun/2021:01:00:08.559438718 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110583] [05/Jun/2021:01:00:08.563040478 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110756] [05/Jun/2021:01:00:08.567821276 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110864] [05/Jun/2021:01:00:09.546815257 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. 
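Each session closes with a clcache_return_buffer "session end" record carrying key=value counters (state, load, sent, and the skipped_* breakdown). A small parser sketch for pulling those counters apart:

    import re

    def session_stats(record: str) -> dict:
        """Parse the key=value counters from a 'session end:' record."""
        _, _, tail = record.partition("session end:")
        return {k: int(v) for k, v in re.findall(r"(\w+)=(-?\d+)", tail)}

    record = ("clcache_return_buffer - session end: state=5 load=1 sent=1 "
              "skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 "
              "skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0")
    print(session_stats(record)["sent"])  # 1 -- one update replayed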
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [110972] [05/Jun/2021:01:00:09.619105260 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111094] [05/Jun/2021:01:00:09.637482360 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04d80002:1622869208:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111215] [05/Jun/2021:01:00:09.642899808 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04d90000:1622869209:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111378] [05/Jun/2021:01:00:09.646134497 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04d9000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111558] [05/Jun/2021:01:00:09.649299395 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca58000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111803] [05/Jun/2021:01:00:09.652681723 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [111978] [05/Jun/2021:01:00:09.656894496 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with csn (60bb04d9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112116] [05/Jun/2021:01:00:09.659749891 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112246] [05/Jun/2021:01:00:09.662924968 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112384] [05/Jun/2021:01:00:09.666126585 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112512] [05/Jun/2021:01:00:09.668882923 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112629] [05/Jun/2021:01:00:09.671997043 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. 
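The ruv_add_csn_inprogress / csnplCommitALL / ruv_update_ruv sequence above is the pending-list discipline: a CSN is parked while its operation is in flight, and the RUV's max CSN rolls forward only once every earlier pending CSN has committed. A toy sketch of that invariant (illustrative; not the server's csnpl code):

    import heapq

    class PendingList:
        """Park in-flight CSNs; advance the RUV max only past committed
        CSNs, in order."""
        def __init__(self):
            self._pending, self._committed, self.ruv_max = [], set(), None

        def add(self, csn):            # ruv_add_csn_inprogress
            heapq.heappush(self._pending, csn)

        def commit(self, csn):         # csnplCommitALL
            self._committed.add(csn)
            while self._pending and self._pending[0] in self._committed:
                # ruv_update_ruv: "Rolled up to csn ..."
                self.ruv_max = heapq.heappop(self._pending)
            return self.ruv_max

    pl = PendingList()
    pl.add("60bb04d9000000010000")   # equal-width hex strings sort correctly
    print(pl.commit("60bb04d9000000010000"))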
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112803] [05/Jun/2021:01:00:09.677819643 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [112985] [05/Jun/2021:01:00:09.681195946 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113156] [05/Jun/2021:01:00:09.684329324 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113323] [05/Jun/2021:01:00:09.691679600 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113504] [05/Jun/2021:01:00:09.694734239 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113688] [05/Jun/2021:01:00:09.700844329 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04d9000000000000 / remotecsn:60bb04d0000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [113822] [05/Jun/2021:01:00:09.920378931 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04d0000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114006] [05/Jun/2021:01:00:09.923789746 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - Schema checking successful: ok to push the schema (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114121] [05/Jun/2021:01:00:10.111480835 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04d90002:1622869209:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114243] [05/Jun/2021:01:00:10.117023676 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04d90002:1622869209:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114364] [05/Jun/2021:01:00:10.120256257 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04da0000:1622869210:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114544] [05/Jun/2021:01:00:10.123349775 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [114699] [05/Jun/2021:01:00:10.126273115 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 
_pattern_errorlog: [114911] [05/Jun/2021:01:00:10.129037130 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d8000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115091] [05/Jun/2021:01:00:10.135019221 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115246] [05/Jun/2021:01:00:10.138573905 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115458] [05/Jun/2021:01:00:10.141760882 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d9000000010000 60bb04d9 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115619] [05/Jun/2021:01:00:10.144744135 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3360000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [115845] [05/Jun/2021:01:00:10.148159540 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3360000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116139] [05/Jun/2021:01:00:10.151532366 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04d9000000010000) csnBuf (60bb04d8000000010000) csnConsumerMax (60bb04d8000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116252] [05/Jun/2021:01:00:10.155411999 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04d8000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116442] [05/Jun/2021:01:00:10.158768080 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04d8000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116597] [05/Jun/2021:01:00:10.161950716 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116709] [05/Jun/2021:01:00:10.165186414 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [116928] [05/Jun/2021:01:00:10.168267647 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04d9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117060] 
[05/Jun/2021:01:00:10.171448272 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117257] [05/Jun/2021:01:00:10.174635553 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04d9000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117390] [05/Jun/2021:01:00:10.177607262 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117520] [05/Jun/2021:01:00:10.180470681 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 17, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117653] [05/Jun/2021:01:00:10.183640274 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [117966] [05/Jun/2021:01:00:10.186913704 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04d9000000010000) csnMax (60bb04d9000000010000) csnBuf (60bb04d9000000010000) csnConsumerMax (60bb04d9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118089] [05/Jun/2021:01:00:10.189815966 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118272] [05/Jun/2021:01:00:10.192542907 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118385] [05/Jun/2021:01:00:10.197901557 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 17 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118518] [05/Jun/2021:01:00:10.202022828 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 17 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118627] [05/Jun/2021:01:00:10.206909792 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [118898] [05/Jun/2021:01:00:10.210255259 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119061] [05/Jun/2021:01:00:10.216506082 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119230] 
[05/Jun/2021:01:00:10.219420796 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119403] [05/Jun/2021:01:00:10.222335745 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119511] [05/Jun/2021:01:00:10.225252187 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119628] [05/Jun/2021:01:00:10.232742894 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119736] [05/Jun/2021:01:00:10.683281561 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [119906] [05/Jun/2021:01:00:10.689834945 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now disabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120080] [05/Jun/2021:01:00:10.693887338 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120254] [05/Jun/2021:01:00:10.697253293 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120425] [05/Jun/2021:01:00:10.700579775 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120599] [05/Jun/2021:01:00:10.704460373 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120763] [05/Jun/2021:01:00:10.794035016 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_stop - agmt="cn=201" (ci-vm-10-0-139-64:39201): Protocol stopped after 0 seconds DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [120891] [05/Jun/2021:01:00:10.797754385 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121076] [05/Jun/2021:01:00:10.801125504 -0400] - DEBUG - NSMMReplicationPlugin - Database RUV: {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d9000000010000 60bb04d9 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121250] [05/Jun/2021:01:00:10.804511990 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" 
(ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121358] [05/Jun/2021:01:00:10.807605163 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121466] [05/Jun/2021:01:00:15.823865904 -0400] - DEBUG - replication - copy_operation_parameters - replica is null. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121635] [05/Jun/2021:01:00:15.829399313 -0400] - DEBUG - NSMMReplicationPlugin - agmt_set_enabled_from_entry: agreement is now enabled (agmt="cn=201" (ci-vm-10-0-139-64:39201)) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121809] [05/Jun/2021:01:00:15.834521867 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [121983] [05/Jun/2021:01:00:15.837828832 -0400] - DEBUG - NSMMReplicationPlugin - close_connection_internal - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Disconnected from the consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122154] [05/Jun/2021:01:00:15.840593269 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: start -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122322] [05/Jun/2021:01:00:15.843323900 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Trying non-secure slapi_ldap_init_ext DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122848] [05/Jun/2021:01:00:15.846822046 -0400] - DEBUG - NSMMReplicationPlugin - conn_connect - agmt="cn=201" (ci-vm-10-0-139-64:39201) - binddn = cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com, passwd = {AES-TUhNR0NTcUdTSWIzRFFFRkRUQm1NRVVHQ1NxR1NJYjNEUUVGRERBNEJDUXlOakF4TVdGa1ppMDNNVEpsTTJSbA0KTUMwNFlqUTBZMlZqTWkwM09HUTVNbVF4WlFBQ0FRSUNBU0F3Q2dZSUtvWklodmNOQWdjd0hRWUpZSVpJQVdVRA0KQkFFcUJCREtKdE9GWGZvN2F1QmNXQmRmRHIxcg==}Y9/HSfwfe+8dvHcyCrkEEKCbalgDnZKakfnYqz6GBjQGfLP/l21cFRi96L3sQaBd DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [122893] +6FG8ZXZVBnUykV//FZxFhUVYM8a5I6n4251ViEjItA= DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123067] [05/Jun/2021:01:00:15.850900520 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - No linger to cancel on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123189] [05/Jun/2021:01:00:15.855454936 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04da0000:1622869210:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123310] [05/Jun/2021:01:00:15.860292823 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04df0000:1622869215:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123477] [05/Jun/2021:01:00:15.864631520 
-0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123658] [05/Jun/2021:01:00:15.868158537 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123842] [05/Jun/2021:01:00:15.871407606 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Checking consumer schema localcsn:60bb04d9000000000000 / remotecsn:60bb04d0000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [123976] [05/Jun/2021:01:00:16.090212114 -0400] - DEBUG - NSMMReplicationPlugin - conn_push_schema - [S] Reread remotecsn:60bb04d9000000000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124091] [05/Jun/2021:01:00:16.094105977 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04df0001:1622869215:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124213] [05/Jun/2021:01:00:16.097161234 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04df0001:1622869215:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124334] [05/Jun/2021:01:00:16.100282020 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04e00000:1622869216:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124514] [05/Jun/2021:01:00:16.103959307 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124669] [05/Jun/2021:01:00:16.107102044 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [124881] [05/Jun/2021:01:00:16.110350155 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d9000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125061] [05/Jun/2021:01:00:16.113394990 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125216] [05/Jun/2021:01:00:16.116353993 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125428] [05/Jun/2021:01:00:16.119117925 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d9000000010000 60bb04d9 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125722] [05/Jun/2021:01:00:16.121969770 -0400] - DEBUG - agmt="cn=201" 
(ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax () csnMax (60bb04d9000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04d9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [125845] [05/Jun/2021:01:00:16.124789644 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126116] [05/Jun/2021:01:00:16.127723721 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=0 sent=0 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126264] [05/Jun/2021:01:00:16.130633138 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No changes to send DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126427] [05/Jun/2021:01:00:16.137170348 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126596] [05/Jun/2021:01:00:16.140246646 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126769] [05/Jun/2021:01:00:16.143606815 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [126891] [05/Jun/2021:01:00:17.842454203 -0400] - DEBUG - _csngen_adjust_local_time - gen state before 60bb04e00000:1622869216:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127012] [05/Jun/2021:01:00:17.846590487 -0400] - DEBUG - _csngen_adjust_local_time - gen state after 60bb04e10000:1622869217:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127175] [05/Jun/2021:01:00:17.849811885 -0400] - DEBUG - NSMMReplicationPlugin - ruv_add_csn_inprogress - Successfully inserted csn 60bb04e1000000010000 into pending list DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127355] [05/Jun/2021:01:00:17.852973118 -0400] - DEBUG - NSMMReplicationPlugin - purge_entry_state_information - From entry cn=test_entry,dc=example,dc=com up to CSN 60b1ca59000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127600] [05/Jun/2021:01:00:17.858731012 -0400] - DEBUG - NSMMReplicationPlugin - write_changelog_and_ruv - Writing change for cn=test_entry,dc=example,dc=com (uniqid: abb09f13-c5ba11eb-b04efb19-4c9097f7, optype: 8) to changelog csn 60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127775] [05/Jun/2021:01:00:17.863243070 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - cl5WriteOperationTxn - Successfully written entry with 
csn (60bb04e1000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [127913] [05/Jun/2021:01:00:17.866308260 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: committing all csns for csn 60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128043] [05/Jun/2021:01:00:17.869490718 -0400] - DEBUG - NSMMReplicationPlugin - csnplCommitALL: processing data csn 60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128181] [05/Jun/2021:01:00:17.872630304 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Successfully committed csn 60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128309] [05/Jun/2021:01:00:17.875650905 -0400] - DEBUG - NSMMReplicationPlugin - ruv_update_ruv - Rolled up to csn 60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128426] [05/Jun/2021:01:00:17.878842806 -0400] - DEBUG - replication - multisupplier_mmr_postop - error 0 for operation 561. DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128600] [05/Jun/2021:01:00:17.883897334 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128782] [05/Jun/2021:01:00:17.887050364 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: wait_for_changes -> ready_to_acquire_replica DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [128953] [05/Jun/2021:01:00:17.890631345 -0400] - DEBUG - NSMMReplicationPlugin - conn_cancel_linger - agmt="cn=201" (ci-vm-10-0-139-64:39201) - Canceling linger on the connection DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129120] [05/Jun/2021:01:00:17.897658168 -0400] - DEBUG - NSMMReplicationPlugin - acquire_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Replica was successfully acquired. 
DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129301] [05/Jun/2021:01:00:17.901592355 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: ready_to_acquire_replica -> sending_updates DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129416] [05/Jun/2021:01:00:17.904637248 -0400] - DEBUG - csngen_adjust_time - gen state before 60bb04e10002:1622869217:0:0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129596] [05/Jun/2021:01:00:17.907464790 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Consumer RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129751] [05/Jun/2021:01:00:17.910566854 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [129963] [05/Jun/2021:01:00:17.913665245 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04d9000000010000 00000000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130143] [05/Jun/2021:01:00:17.916850248 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - _cl5PositionCursorForReplay - (agmt="cn=201" (ci-vm-10-0-139-64:39201)): Supplier RUV: DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130298] [05/Jun/2021:01:00:17.920407737 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replicageneration} 60bb047e000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130510] [05/Jun/2021:01:00:17.923268713 -0400] - DEBUG - NSMMReplicationPlugin - agmt="cn=201" (ci-vm-10-0-139-64:39201): {replica 1 ldap://localhost.localdomain:39001} 60bb047e000100010000 60bb04e1000000010000 60bb04e1 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130671] [05/Jun/2021:01:00:17.926499783 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - found thread private buffer cache 0x7f53f3362800 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [130897] [05/Jun/2021:01:00:17.929695729 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_buffer - _pool is 0x7f5423787260 _pool->pl_busy_lists is 0x7f53f3217410 _pool->pl_busy_lists->bl_buffers is 0x7f53f3362800 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131191] [05/Jun/2021:01:00:17.932365469 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_initial_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 0) - csnPrevMax () csnMax (60bb04e1000000010000) csnBuf (00000000000000000000) csnConsumerMax (60bb04d9000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131304] [05/Jun/2021:01:00:17.935287773 -0400] - DEBUG - clcache_initial_anchorcsn - anchor is now: 60bb04d9000000010000 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131494] [05/Jun/2021:01:00:17.938430323 -0400] - DEBUG - NSMMReplicationPlugin - changelog program - agmt="cn=201" (ci-vm-10-0-139-64:39201): CSN 60bb04d9000000010000 found, position set for replay DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131649] [05/Jun/2021:01:00:17.941380826 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_get_next_change - load=1 rec=1 csn=60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131761] [05/Jun/2021:01:00:17.944409311 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Starting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [131893] [05/Jun/2021:01:00:17.947415406 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132112] [05/Jun/2021:01:00:17.950305913 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Sending modify operation (dn="cn=test_entry,dc=example,dc=com" csn=60bb04e1000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132244] [05/Jun/2021:01:00:17.953546919 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132441] [05/Jun/2021:01:00:17.956095365 -0400] - DEBUG - NSMMReplicationPlugin - replay_update - agmt="cn=201" (ci-vm-10-0-139-64:39201): Receiver successfully sent operation with csn 60bb04e1000000010000 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132573] [05/Jun/2021:01:00:17.958614593 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [132886] [05/Jun/2021:01:00:17.961289694 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_adjust_anchorcsn - agmt="cn=201" (ci-vm-10-0-139-64:39201) - (cscb 0 - state 1) - csnPrevMax (60bb04e1000000010000) csnMax (60bb04e1000000010000) csnBuf (60bb04e1000000010000) csnConsumerMax (60bb04e1000000010000) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133009] [05/Jun/2021:01:00:17.963843879 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_load_buffer - rc=-12797 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133192] [05/Jun/2021:01:00:17.971123180 -0400] - DEBUG - NSMMReplicationPlugin - send_updates - agmt="cn=201" (ci-vm-10-0-139-64:39201): No more updates to send (cl5GetNextOperationToReplay) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133304] [05/Jun/2021:01:00:17.974379930 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 0 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133436] [05/Jun/2021:01:00:17.977176045 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133568] [05/Jun/2021:01:00:17.987965593 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133701] [05/Jun/2021:01:00:18.011204948 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133831] [05/Jun/2021:01:00:18.014738431 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Result 3, 0, 0, 11, (null) DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [133964] [05/Jun/2021:01:00:18.017456405 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134097] [05/Jun/2021:01:00:18.021324757 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134230] [05/Jun/2021:01:00:18.026353280 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134363] [05/Jun/2021:01:00:18.033503744 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134496] [05/Jun/2021:01:00:18.044821525 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134629] [05/Jun/2021:01:00:18.064105711 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain - Read result for message_id 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134742] [05/Jun/2021:01:00:18.077325240 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_waitfor_async_results - 11 11 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [134851] [05/Jun/2021:01:00:18.099741929 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_result_threadmain exiting DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [135122] [05/Jun/2021:01:00:18.103299955 -0400] - DEBUG - agmt="cn=201" (ci-vm-10-0-139-64:39201) - clcache_return_buffer - session end: state=5 load=1 sent=1 skipped=0 skipped_new_rid=0 skipped_csn_gt_cons_maxcsn=0 skipped_up_to_date=0 skipped_csn_gt_ruv=0 skipped_csn_covered=0 DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [135285] [05/Jun/2021:01:00:18.109833701 -0400] - DEBUG - NSMMReplicationPlugin - release_replica - agmt="cn=201" (ci-vm-10-0-139-64:39201): Successfully released consumer DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [135454] [05/Jun/2021:01:00:18.113025196 -0400] - DEBUG - NSMMReplicationPlugin - conn_start_linger -agmt="cn=201" (ci-vm-10-0-139-64:39201) - Beginning linger on the connection DEBUG 
tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [135627] [05/Jun/2021:01:00:18.120201205 -0400] - DEBUG - NSMMReplicationPlugin - repl5_inc_run - agmt="cn=201" (ci-vm-10-0-139-64:39201): State: sending_updates -> wait_for_changes DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:62 _pattern_errorlog: [135627] DEBUG tests.suites.schema.schema_replication_test:schema_replication_test.py:67 _pattern_errorlog: end at offset 135627 INFO tests.suites.schema.schema_replication_test:schema_replication_test.py:693 Testcase PASSED | |||
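The repeated "_pattern_errorlog: [NNNNNN]" records in the block above come from the test tailing the supplier's error log from a saved byte offset until an expected pattern appears, then reporting the offset it stopped at ("end at offset 135627"). A minimal sketch of such a scan, as a simplified assumption about what the helper in schema_replication_test.py does, not its actual code:

import re

def pattern_errorlog(path, pattern, start_offset=0, log=print):
    # Resume scanning the error log at start_offset (an offset previously
    # returned by tell()); report each line together with the offset it
    # started at, mirroring the "_pattern_errorlog: [N]" records above.
    regex = re.compile(pattern)
    with open(path, "r", errors="replace") as f:
        f.seek(start_offset)
        while True:
            offset = f.tell()
            line = f.readline()
            if not line:                 # end of log, pattern not found yet
                return None, offset
            log("_pattern_errorlog: [%d] %s" % (offset, line.rstrip()))
            if regex.search(line):       # found: hand back match and offset
                return line, f.tell()

Saving the returned offset between calls lets the test scan only the log lines written since the previous check instead of rereading the whole file.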
Passed | suites/schema/schema_test.py::test_schema_comparewithfiles | 10.33 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
-------------------------------Captured log call--------------------------------
INFO tests.suites.schema.schema_test:schema_test.py:124 Running test_schema_comparewithfiles...
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /etc/dirsrv/slapd-standalone1/schema/99user.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60samba3.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/05rfc4524.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/28pilot.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60nss-ldap.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60mozilla.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/50ns-mail.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/00core.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/10dna-plugin.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/10automember-plugin.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/03entryuuid.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60eduperson.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/50ns-web.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/05rfc2927.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60autofs.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60acctpolicy.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/10mep-plugin.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/10rfc2307compat.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/50ns-admin.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/01core389.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/05rfc4523.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/25java-object.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60sudo.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60pam-plugin.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60pureftpd.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/50ns-directory.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/20subscriber.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60rfc3712.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60trust.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/06inetorgperson.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/02common.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/30ns-common.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/50ns-certificate.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60posix-winsync-plugin.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60rfc2739.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/50ns-value.ldif as a schema file - skipping
WARNING tests.suites.schema.schema_test:schema_test.py:136 Unable to parse /usr/share/dirsrv/schema/60sabayon.ldif as a schema file - skipping
INFO tests.suites.schema.schema_test:schema_test.py:165 test_schema_comparewithfiles: PASSED | |||
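The WARNING lines above show the compare-with-files pass attempting to parse every shipped schema LDIF and skipping any file it cannot handle. A hedged sketch of that walk; parse_schema_file() is a naive hypothetical stand-in, not the parser schema_test.py actually uses:

import glob
import logging

log = logging.getLogger("tests.suites.schema.schema_test")

def parse_schema_file(path):
    # Hypothetical stand-in parser: insist on at least one schema definition.
    with open(path) as f:
        text = f.read()
    if "attributeTypes" not in text and "objectClasses" not in text:
        raise ValueError("no schema definitions found")
    return text

def collect_parseable_schema(schema_dir="/usr/share/dirsrv/schema"):
    parsed = {}
    for path in sorted(glob.glob(schema_dir + "/*.ldif")):
        try:
            parsed[path] = parse_schema_file(path)
        except (OSError, ValueError):
            log.warning("Unable to parse %s as a schema file - skipping", path)
    return parsed

Skipping unparseable files instead of failing keeps the comparison focused on the schema elements the test can actually read back from the server.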
Passed | suites/setup_ds/dscreate_test.py::test_setup_ds_minimal_dry | 0.22 | |
-------------------------------Captured log setup------------------------------- DEBUG lib389:dscreate_test.py:42 Instance allocated DEBUG lib389:__init__.py:547 Allocate <class 'lib389.DirSrv'> with None DEBUG lib389:__init__.py:570 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321 DEBUG lib389:__init__.py:595 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321 DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone -------------------------------Captured log call-------------------------------- DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone INFO LogCapture.SetupDs:setup.py:670 NOOP: Dry run requested DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone -----------------------------Captured log teardown------------------------------ DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone | |||
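The "NOOP: Dry run requested" record in test_setup_ds_minimal_dry shows the installer returning before any side effects when a dry run is requested, which is why the teardown still finds no instance. A minimal sketch of that gate; the class and method names are illustrative, not lib389's exact SetupDs API:

import logging

class InstallerSketch:
    # Illustrative only: the dry-run gate behind "NOOP: Dry run requested".
    def __init__(self, dryrun=False):
        self.dryrun = dryrun
        self.log = logging.getLogger("LogCapture.SetupDs")

    def create(self):
        if self.dryrun:
            self.log.info("NOOP: Dry run requested")
            return True        # report success without touching the system
        # ... a real installer would create directories, certs, etc. here ...
        return True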
Passed | suites/setup_ds/dscreate_test.py::test_setup_ds_minimal | 21.08 | |
-------------------------------Captured log setup------------------------------- DEBUG lib389:dscreate_test.py:42 Instance allocated DEBUG lib389:__init__.py:547 Allocate <class 'lib389.DirSrv'> with None DEBUG lib389:__init__.py:570 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321 DEBUG lib389:__init__.py:595 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321 DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone -------------------------------Captured log call-------------------------------- DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone DEBUG lib389:__init__.py:547 Allocate <class 'lib389.DirSrv'> with None DEBUG lib389:__init__.py:559 Allocate <class 'lib389.DirSrv'> with /var/run/slapd-standalone.socket DEBUG lib389:__init__.py:570 Allocate <class 'lib389.DirSrv'> with localhost:54321 DEBUG lib389:__init__.py:595 Allocate <class 'lib389.DirSrv'> with localhost:54321 DEBUG lib389:nss_ssl.py:197 nss cmd: /usr/bin/certutil -N -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt DEBUG lib389:nss_ssl.py:199 nss output: DEBUG lib389.nss_ssl:nss_ssl.py:314 nss cmd: /usr/bin/certutil -L -n Self-Signed-CA -d /etc/dirsrv/ssca/ DEBUG lib389:nss_ssl.py:559 CSR subject -> CN=localhost.localdomain,givenName=1b39b979-ea5d-4500-b257-175edc1c120c,O=testing,L=389ds,ST=Queensland,C=AU DEBUG lib389:nss_ssl.py:560 CSR alt_names -> ['localhost.localdomain'] DEBUG lib389:nss_ssl.py:592 nss cmd: /usr/bin/certutil -R --keyUsage digitalSignature,nonRepudiation,keyEncipherment,dataEncipherment --nsCertType sslClient,sslServer --extKeyUsage clientAuth,serverAuth -s CN=localhost.localdomain,givenName=1b39b979-ea5d-4500-b257-175edc1c120c,O=testing,L=389ds,ST=Queensland,C=AU -8 localhost.localdomain -g 4096 -d /etc/dirsrv/slapd-standalone -z /etc/dirsrv/slapd-standalone/noise.txt -f /etc/dirsrv/slapd-standalone/pwdfile.txt -a -o /etc/dirsrv/slapd-standalone/Server-Cert.csr DEBUG lib389.nss_ssl:nss_ssl.py:618 nss cmd: /usr/bin/certutil -C -d /etc/dirsrv/ssca/ -f /etc/dirsrv/ssca//pwdfile.txt -v 24 -a -i /etc/dirsrv/slapd-standalone/Server-Cert.csr -o /etc/dirsrv/slapd-standalone/Server-Cert.crt -c Self-Signed-CA DEBUG lib389:nss_ssl.py:242 nss cmd: /usr/bin/openssl rehash /etc/dirsrv/slapd-standalone DEBUG lib389:nss_ssl.py:646 nss cmd: /usr/bin/certutil -A -n Self-Signed-CA -t CT,, -a -i /etc/dirsrv/slapd-standalone/ca.crt -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt DEBUG lib389:nss_ssl.py:661 nss cmd: /usr/bin/certutil -A -n Server-Cert -t ,, -a -i /etc/dirsrv/slapd-standalone/Server-Cert.crt -d /etc/dirsrv/slapd-standalone -f /etc/dirsrv/slapd-standalone/pwdfile.txt DEBUG lib389:nss_ssl.py:670 nss cmd: /usr/bin/certutil -V -d /etc/dirsrv/slapd-standalone -n Server-Cert -u YCV DEBUG lib389.utils:utils.py:287 port 636 already in [389, 636, 3268, 3269, 7389], skipping port relabel DEBUG lib389.utils:utils.py:318 CMD: semanage port -a -t ldap_port_t -p tcp 54321 ; STDOUT: ; STDERR: DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389:__init__.py:1073 systemd status -> True DEBUG lib389:__init__.py:930 open(): Connecting to uri ldapi://%2Fvar%2Frun%2Fslapd-standalone.socket DEBUG lib389:__init__.py:938 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:947 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG 
lib389:__init__.py:960 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:971 Using /etc/openldap/ldap.conf certificate policy DEBUG lib389:__init__.py:972 ldap.OPT_X_TLS_REQUIRE_CERT = 2 DEBUG lib389:__init__.py:1005 open(): Using root autobind ... DEBUG lib389:__init__.py:1026 open(): bound as cn=Directory Manager DEBUG lib389:__init__.py:1689 Retrieving entry with [('',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.5 B2021.156.0143 ] DEBUG lib389:__init__.py:930 open(): Connecting to uri ldapi://%2Fvar%2Frun%2Fslapd-standalone.socket DEBUG lib389:__init__.py:938 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:947 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:960 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:971 Using /etc/openldap/ldap.conf certificate policy DEBUG lib389:__init__.py:972 ldap.OPT_X_TLS_REQUIRE_CERT = 2 DEBUG lib389:__init__.py:1005 open(): Using root autobind ... DEBUG lib389:__init__.py:1026 open(): bound as cn=Directory Manager DEBUG lib389:__init__.py:1689 Retrieving entry with [('',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.5 B2021.156.0143 ] DEBUG Config:_mapped_object.py:435 cn=config set REPLACE: ('nsslapd-secureport', '636') DEBUG Config:_mapped_object.py:435 cn=config set REPLACE: ('nsslapd-security', 'on') DEBUG Index:_mapped_object.py:911 Checking "None" under cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config : {'cn': 'entryUUID', 'nsSystemIndex': 'false', 'nsIndexType': ['eq', 'pres']} DEBUG Index:_mapped_object.py:889 Using first property cn: entryUUID as rdn DEBUG Index:_mapped_object.py:915 Validated dn cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config DEBUG Index:_mapped_object.py:942 Creating cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config DEBUG lib389._entry:_entry.py:261 updating dn: cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config DEBUG lib389._entry:_entry.py:267 updated dn: cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config with {'objectclass': [b'top', b'nsIndex']} DEBUG lib389._entry:_entry.py:261 updating dn: cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config DEBUG lib389._entry:_entry.py:267 updated dn: cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config with {'cn': [b'entryUUID'], 'nsSystemIndex': [b'false'], 'nsIndexType': [b'eq', b'pres']} DEBUG Index:_mapped_object.py:948 Created entry cn=entryUUID,cn=default indexes,cn=config,cn=ldbm database,cn=plugins,cn=config : {'objectclass': [b'top', b'nsIndex'], 'cn': [b'entryUUID'], 'nsSystemIndex': [b'false'], 'nsIndexType': [b'eq', b'pres']} DEBUG Config:_mapped_object.py:435 cn=config set REPLACE: ('nsslapd-rootpw', '********') DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389:__init__.py:1143 systemd status -> True DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389:__init__.py:1073 systemd status -> True DEBUG lib389:__init__.py:930 open(): Connecting to uri ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321/ DEBUG lib389:__init__.py:938 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:947 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:960 Using external ca certificate 
/etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:971 Using /etc/openldap/ldap.conf certificate policy DEBUG lib389:__init__.py:972 ldap.OPT_X_TLS_REQUIRE_CERT = 2 DEBUG lib389:__init__.py:1026 open(): bound as cn=Directory Manager DEBUG lib389:__init__.py:1689 Retrieving entry with [('',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.5 B2021.156.0143 ] DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389:__init__.py:1143 systemd status -> True DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389:__init__.py:1073 systemd status -> True DEBUG lib389:__init__.py:930 open(): Connecting to uri ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321/ DEBUG lib389:__init__.py:938 Using dirsrv ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:947 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:960 Using external ca certificate /etc/dirsrv/slapd-standalone DEBUG lib389:__init__.py:971 Using /etc/openldap/ldap.conf certificate policy DEBUG lib389:__init__.py:972 ldap.OPT_X_TLS_REQUIRE_CERT = 2 DEBUG lib389:__init__.py:1026 open(): bound as cn=Directory Manager DEBUG lib389:__init__.py:1689 Retrieving entry with [('',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: vendorVersion: 389-Directory/2.0.5 B2021.156.0143 ] DEBUG lib389.remove_ds:remove.py:38 Removing instance standalone DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config nsslapd-bakdir: /var/lib/dirsrv/slapd-standalone/bak ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config nsslapd-certdir: /etc/dirsrv/slapd-standalone ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=bdb,cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=bdb,cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-db-home-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config,cn=ldbm database,cn=plugins,cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config,cn=ldbm database,cn=plugins,cn=config nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config nsslapd-ldifdir: /var/lib/dirsrv/slapd-standalone/ldif ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config nsslapd-lockdir: /var/lock/dirsrv/slapd-standalone ] DEBUG lib389:__init__.py:1689 Retrieving entry with [('cn=config',)] DEBUG lib389:__init__.py:1699 Retrieved entry [dn: cn=config nsslapd-instancedir: /usr/lib64/dirsrv/slapd-standalone ] DEBUG lib389.remove_ds:remove.py:67 Checking for instance marker at /etc/dirsrv/slapd-standalone/dse.ldif DEBUG 
lib389.remove_ds:remove.py:72 Found instance marker at /etc/dirsrv/slapd-standalone/dse.ldif! Proceeding to remove ... DEBUG lib389.remove_ds:remove.py:76 Stopping instance standalone DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389:__init__.py:1143 systemd status -> True DEBUG lib389.remove_ds:remove.py:79 Found instance marker at /etc/dirsrv/slapd-standalone/dse.ldif! Proceeding to remove ... DEBUG lib389.remove_ds:remove.py:83 Stopping instance standalone DEBUG lib389:__init__.py:1170 systemd status -> True DEBUG lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/bak DEBUG lib389.remove_ds:remove.py:92 Removing /etc/dirsrv/slapd-standalone DEBUG lib389.remove_ds:remove.py:92 Removing /etc/dirsrv/slapd-standalone DEBUG lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db DEBUG lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db DEBUG lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/db/../ DEBUG lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/changelogdb DEBUG lib389.remove_ds:remove.py:92 Removing /var/lib/dirsrv/slapd-standalone/ldif DEBUG lib389.remove_ds:remove.py:92 Removing /var/lock/dirsrv/slapd-standalone DEBUG lib389.remove_ds:remove.py:92 Removing /var/log/dirsrv/slapd-standalone DEBUG lib389.remove_ds:remove.py:92 Removing /usr/lib64/dirsrv/slapd-standalone DEBUG lib389.remove_ds:remove.py:92 Removing /etc/sysconfig/dirsrv-standalone DEBUG lib389.remove_ds:remove.py:101 Removing the systemd symlink DEBUG lib389.remove_ds:remove.py:108 CMD: systemctl disable dirsrv@standalone ; STDOUT: ; STDERR: Removed /etc/systemd/system/multi-user.target.wants/dirsrv@standalone.service. DEBUG lib389.remove_ds:remove.py:110 Removing /etc/tmpfiles.d/dirsrv-standalone.conf DEBUG lib389.remove_ds:remove.py:119 Removing the port labels DEBUG lib389.remove_ds:remove.py:149 Moving /etc/dirsrv/slapd-standalone to /etc/dirsrv/slapd-standalone.removed DEBUG lib389.remove_ds:remove.py:159 Complete -----------------------------Captured log teardown------------------------------ DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone | |||
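The "nss cmd:" records above are certutil invocations that bootstrap the instance's NSS certificate database: create the DB (-N), generate a CSR (-R), issue and import certificates (-C/-A), and verify the server cert for SSL use (-V -u YCV). A hedged sketch of driving certutil the same way from Python; the paths come from the log, the wrapper itself is an assumption, and the example calls are left commented out because they modify the system:

import subprocess

CERT_DIR = "/etc/dirsrv/slapd-standalone"            # path from the log above
PWD_FILE = CERT_DIR + "/pwdfile.txt"

def certutil(*args):
    # Thin wrapper: run certutil, raise on non-zero exit, return stdout.
    cmd = ["/usr/bin/certutil"] + list(args)
    return subprocess.run(cmd, check=True, capture_output=True, text=True).stdout

# Create an empty NSS database (cf. "certutil -N -d ... -f ..." above):
#   certutil("-N", "-d", CERT_DIR, "-f", PWD_FILE)
# Verify the imported server certificate for SSL server/client/CA usage
# (cf. "certutil -V ... -n Server-Cert -u YCV"):
#   certutil("-V", "-d", CERT_DIR, "-n", "Server-Cert", "-u", "YCV")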
Passed | suites/setup_ds/dscreate_test.py::test_setup_ds_inf_minimal | 0.07 | |
-------------------------------Captured log setup------------------------------- DEBUG lib389:dscreate_test.py:42 Instance allocated DEBUG lib389:__init__.py:547 Allocate <class 'lib389.DirSrv'> with None DEBUG lib389:__init__.py:570 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321 DEBUG lib389:__init__.py:595 Allocate <class 'lib389.DirSrv'> with ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:54321 DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone -----------------------------Captured log teardown------------------------------ DEBUG lib389:__init__.py:735 list instance not found in /etc/dirsrv/slapd-standalone/dse.ldif: standalone | |||
Passed | suites/setup_ds/remove_test.py::test_basic[True] | 11.18 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/setup_ds/remove_test.py::test_basic[False] | 10.39 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
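The two remove_test cases exercise the teardown path whose details were captured earlier under test_setup_ds_minimal: lib389.remove_ds first checks for an instance marker (dse.ldif) before touching anything, removes the per-instance directories, disables the systemd unit, and finally moves the config directory aside as slapd-standalone.removed. A simplified, hypothetical sketch of that flow, with paths taken from the log:

import os
import shutil

def remove_instance_sketch(config_dir="/etc/dirsrv/slapd-standalone",
                           data_dirs=("/var/lib/dirsrv/slapd-standalone/db",
                                      "/var/lib/dirsrv/slapd-standalone/ldif",
                                      "/var/log/dirsrv/slapd-standalone")):
    # Refuse to act unless the instance marker exists (cf. remove.py:67/72).
    marker = os.path.join(config_dir, "dse.ldif")
    if not os.path.exists(marker):
        raise ValueError("No instance marker at %s - nothing to remove" % marker)
    for d in data_dirs:
        shutil.rmtree(d, ignore_errors=True)   # cf. the "Removing ..." records
    # The real code also disables the systemd unit and removes port labels;
    # the config dir is then moved aside rather than deleted (remove.py:149).
    shutil.move(config_dir, config_dir + ".removed")

Moving the config directory aside instead of deleting it keeps the certificate database recoverable after an accidental removal.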
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr1-ldap.MOD_ADD-exp_values0-vucsn] | 24.53 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:142 Creating replication topology.
INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 699ef1ac-c154-4afc-b955-0ba644b4b7e7 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1ad99dde-e0a9-4b0b-9a26-8c967aa90e9a / got description=699ef1ac-c154-4afc-b955-0ba644b4b7e7)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working
INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists
INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists
-------------------------------Captured log call--------------------------------
INFO tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test
INFO tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test
INFO tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes
INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
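Each parametrized mmt_state_test case in this group verifies the CSN state marker recorded for the modified attribute: vucsn for a value add, adcsn for a replace, vdcsn for a value delete. In 389-ds this state is exposed through the nscpEntryWsi operational attribute; a hedged sketch of a check along those lines using python-ldap directly (the test itself presumably goes through lib389 helpers, and the example DN is hypothetical):

import ldap

def csn_marker_present(conn, dn, attr, marker):
    # Request the operational attribute explicitly; it is not returned by
    # default. Values look like "description;vucsn-60bb04e1...: Test1usr1".
    res = conn.search_s(dn, ldap.SCOPE_BASE, "(objectClass=*)", ["nscpentrywsi"])
    _, attrs = res[0]
    values = [v.decode("utf-8") for vals in attrs.values() for v in vals]
    return any(line.startswith(attr) and ";" + marker in line for line in values)

# Example (hypothetical DN):
#   csn_marker_present(conn, "uid=state1test,dc=example,dc=com",
#                      "description", "vucsn")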
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr2-ldap.MOD_ADD-exp_values1-vucsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr3-ldap.MOD_ADD-exp_values2-vucsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr4-ldap.MOD_REPLACE-exp_values3-adcsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_desc_attr_state[description-Test1usr4-ldap.MOD_DELETE-exp_values4-vdcsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:99 Add user: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:107 Check if list of description attrs present for: state1test INFO tests.suites.state.mmt_state_test:mmt_state_test.py:110 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: state1test | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestCN1-ldap.MOD_ADD-exp_values0-vucsn] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestCN2-ldap.MOD_ADD-exp_values1-vucsn] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestnewCN3-ldap.MOD_REPLACE-exp_values2-adcsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:166 Check if list of cn attrs present for: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_cn_attr_state[cn-TestnewCN3-ldap.MOD_DELETE-None-None] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:153 Add user: TestCNusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:168 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: TestCNusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-Chinese-ldap.MOD_REPLACE-exp_values0-vucsn] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-French-ldap.MOD_ADD-None-None] | 0.36 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-German-ldap.MOD_REPLACE-exp_values2-adcsn] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_single_value_attr_state[preferredlanguage-German-ldap.MOD_DELETE-exp_values3-vdcsn] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:206 Add user: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:219 Check if list of cn attrs present for: Langusr1 INFO tests.suites.state.mmt_state_test:mmt_state_test.py:221 Checking for operational attributes INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: Langusr1 | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower1-ldap.MOD_ADD-exp_values0-vucsn] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower2-ldap.MOD_ADD-exp_values1-vucsn] | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower3-ldap.MOD_ADD-exp_values2-vucsn] | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower4-ldap.MOD_REPLACE-exp_values3-adcsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_subtype_attr_state[roomnumber;office-Tower4-ldap.MOD_DELETE-exp_values4-vucsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:276 Add user: roomoffice1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:285 Check if list of roomnumber;office attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:287 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: roomoffice1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef1-ldap.MOD_ADD-exp_values0-vucsn] | 0.12 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_ADD-exp_values1-vucsn] | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef3-ldap.MOD_ADD-exp_values2-vucsn] | 0.11 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_REPLACE-exp_values3-adcsn] | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
Passed | suites/state/mmt_state_test.py::test_check_jpeg_attr_state[jpegphoto-thedeadbeef2-ldap.MOD_DELETE-exp_values4-vdcsn] | 1.88 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.state.mmt_state_test:mmt_state_test.py:342 Add user: testJpeg1usr INFO tests.suites.state.mmt_state_test:mmt_state_test.py:351 Check if list of jpeg attributes are present for a given entry INFO tests.suites.state.mmt_state_test:mmt_state_test.py:353 Checking if operational attributes are present for cn INFO tests.suites.state.mmt_state_test:mmt_state_test.py:36 Checking if operational attrs vucsn, adcsn and vdcsn present for: testJpeg1usr | |||
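The mmt_state tests above assert that, after each modify operation, the server has stamped the affected attribute values with state CSNs (vucsn for a value update, adcsn for an attribute delete, vdcsn for a value delete). A minimal sketch of how that state can be inspected from a client, assuming a hypothetical local instance, Directory Manager credentials, and a test entry DN; 389-ds exposes per-value CSNs through the nscpentrywsi operational attribute to sufficiently privileged binds:

    import ldap

    # Hypothetical connection details; the suite itself uses lib389 fixtures.
    conn = ldap.initialize("ldap://localhost:389")
    conn.simple_bind_s("cn=Directory Manager", "password")

    # nscpentrywsi ("entry with state info") renders each attribute value
    # together with its CSN metadata, one value per line.
    dn = "uid=TestCNusr1,ou=People,dc=example,dc=com"  # hypothetical entry
    entry = conn.search_s(dn, ldap.SCOPE_BASE, "(objectClass=*)", ["nscpentrywsi"])[0][1]
    for raw in entry["nscpentrywsi"]:
        line = raw.decode("utf-8")
        if any(csn in line for csn in ("vucsn", "adcsn", "vdcsn")):
            print(line)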
Passed | suites/syncrepl_plugin/basic_test.py::test_syncrepl_basic | 12.51 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_mep | 24.04 | |
------------------------------Captured stdout call------------------------------ syncrepl_poll: LDAP error: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie | 50.18 | |
------------------------------Captured stdout call------------------------------ syncrepl_poll: LDAP error: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie_add_del | 37.08 | |
------------------------------Captured stdout call------------------------------ syncrepl_poll: LDAP error: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cookie_with_failure | 45.64 | |
------------------------------Captured stdout call------------------------------ syncrepl_poll: LDAP error: {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} | |||
Passed | suites/syncrepl_plugin/basic_test.py::test_sync_repl_cenotaph | 43.89 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2649a0de-2003-42f2-a077-57932717c52c / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 3a49244f-f084-4980-8272-f918b1535bdc / got description=2649a0de-2003-42f2-a077-57932717c52c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stdout call------------------------------ syncrepl_poll: LDAP error (%s) {'result': -1, 'desc': "Can't contact LDAP server", 'ctrls': []} | |||
Passed | suites/syncrepl_plugin/openldap_test.py::test_syncrepl_openldap | 20.52 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
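The syncrepl suite above drives RFC 4533 content-synchronization searches against the instance (the captured stdout comes from the tests' own poll loops noticing deliberate server shutdowns). For reference, a minimal refreshOnly consumer sketch built on python-ldap's SyncreplConsumer mixin; the URI and credentials are hypothetical:

    import ldap
    from ldap.ldapobject import ReconnectLDAPObject
    from ldap.syncrepl import SyncreplConsumer

    class MinimalSyncreplClient(ReconnectLDAPObject, SyncreplConsumer):
        """Collects entries delivered by an RFC 4533 refreshOnly search."""

        def __init__(self, *args, **kwargs):
            self.cookie = None
            self.entries = {}
            ReconnectLDAPObject.__init__(self, *args, **kwargs)

        # SyncreplConsumer callbacks (no-ops in the base class):
        def syncrepl_set_cookie(self, cookie):
            self.cookie = cookie

        def syncrepl_get_cookie(self):
            return self.cookie

        def syncrepl_entry(self, dn, attrs, uuid):
            self.entries[uuid] = (dn, attrs)

        def syncrepl_delete(self, uuids):
            for uuid in uuids:
                self.entries.pop(uuid, None)

    client = MinimalSyncreplClient("ldap://localhost:38901")  # hypothetical URI
    client.simple_bind_s("cn=Directory Manager", "password")
    msgid = client.syncrepl_search("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                                   mode="refreshOnly")
    while client.syncrepl_poll(msgid=msgid, timeout=30):
        pass  # refreshOnly: poll until the refresh phase completes
    print(len(client.entries), "entries synchronized")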
Passed | suites/syntax/acceptance_test.py::test_valid | 12.46 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:55 Clean the error log INFO lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:59 Attempting to add task entry... INFO lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:66 Found 0 invalid entries - Success | |||
Passed | suites/syntax/acceptance_test.py::test_invalid_uidnumber | 3.29 | |
-------------------------------Captured log call-------------------------------- INFO lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:92 Clean the error log INFO lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:99 Attempting to add task entry... INFO lib389.topologies.tests.suites.syntax.acceptance_test:acceptance_test.py:106 Found an invalid entry with wrong uidNumber - Success | |||
Passed | suites/syntax/acceptance_test.py::test_invalid_dn_syntax_crash | 3.80 | |
No log output captured. | |||
Passed | suites/syntax/acceptance_test.py::test_dn_syntax_spaces_delete[props0-cn=\20leadingSpace,ou=Groups,dc=example,dc=com] | 4.60 | |
No log output captured. | |||
Passed | suites/syntax/acceptance_test.py::test_dn_syntax_spaces_delete[props1-cn=trailingSpace\20,ou=Groups,dc=example,dc=com] | 7.04 | |
No log output captured. | |||
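The two test_dn_syntax_spaces_delete parameters above use \20, the RFC 4514 hex escape for a space, so the RDN value can begin or end with a literal space; the test then verifies such an entry can be deleted cleanly. A sketch of the same round trip with python-ldap, assuming a hypothetical instance and credentials:

    import ldap
    import ldap.modlist

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical URI
    conn.simple_bind_s("cn=Directory Manager", "password")

    # "\20" escapes the leading space in the RDN (RFC 4514); the stored
    # attribute value keeps the literal space.
    dn = r"cn=\20leadingSpace,ou=Groups,dc=example,dc=com"
    attrs = {
        "objectClass": [b"top", b"groupOfNames"],
        "cn": [b" leadingSpace"],
        "member": [b"cn=Directory Manager"],
    }
    conn.add_s(dn, ldap.modlist.addModlist(attrs))
    conn.delete_s(dn)  # the delete must succeed and leave the server up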
Passed | suites/syntax/mr_test.py::test_sss_mr | 16.48 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.syntax.mr_test:mr_test.py:42 Creating LDIF... INFO tests.suites.syntax.mr_test:mr_test.py:47 Importing LDIF... INFO tests.suites.syntax.mr_test:mr_test.py:52 Search using server side sorting using undefined mr in the attr... INFO tests.suites.syntax.mr_test:mr_test.py:62 Test PASSED | |||
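test_sss_mr performs a server-side-sorted search whose sort key names a matching rule the server does not implement, and expects a clean error instead of a crash. A sketch of issuing such a search with python-ldap's SSSRequestControl; the URI, credentials, and the undefined rule OID are hypothetical:

    import ldap
    from ldap.controls.sss import SSSRequestControl

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical URI
    conn.simple_bind_s("cn=Directory Manager", "password")

    # Sort key syntax is "attribute[:orderingRule]"; 1.1.1.1.1.1 stands in
    # for a rule OID the server does not know.
    sss = SSSRequestControl(criticality=True, ordering_rules=["cn:1.1.1.1.1.1"])
    try:
        conn.search_ext_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                          "(uid=*)", serverctrls=[sss])
    except ldap.LDAPError as exc:
        print("server rejected the sort control:", exc)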
Passed | suites/tls/cipher_test.py::test_long_cipher_list | 24.07 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/tls/ssl_version_test.py::test_ssl_version_range | 47.14 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.tls.ssl_version_test:ssl_version_test.py:38 default min: TLS1.2 max: TLS1.3 | |||
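The logged default range (min TLS1.2, max TLS1.3) lives on cn=encryption,cn=config as sslVersionMin and sslVersionMax. A sketch of reading and tightening the range over LDAP, assuming hypothetical connection details; version changes normally take effect only after a restart:

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical URI
    conn.simple_bind_s("cn=Directory Manager", "password")

    dn = "cn=encryption,cn=config"
    entry = conn.search_s(dn, ldap.SCOPE_BASE, "(objectClass=*)",
                          ["sslVersionMin", "sslVersionMax"])[0][1]
    print("min:", entry["sslVersionMin"][0].decode(),
          "max:", entry["sslVersionMax"][0].decode())

    # Raise the floor to TLS1.3; restart the instance for it to apply.
    conn.modify_s(dn, [(ldap.MOD_REPLACE, "sslVersionMin", [b"TLS1.3"])])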
Passed | suites/tls/tls_cert_namespace_test.py::test_pem_cert_in_private_namespace | 16.21 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:55 Enable TLS INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:58 Checking PrivateTmp value INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:60 Command used : systemctl show -p PrivateTmp dirsrv@standalone1.service INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:64 Check files in private /tmp INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:69 Check that Self-Signed-CA.pem is present in private /tmp INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:69 Check that Server-Cert-Key.pem is present in private /tmp INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:69 Check that Server-Cert.pem is present in private /tmp INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:72 Check instance cert directory INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:76 Check that Self-Signed-CA.pem is not present in /etc/dirsrv/slapd-standalone1/ directory INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:76 Check that Server-Cert-Key.pem is not present in /etc/dirsrv/slapd-standalone1/ directory INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:76 Check that Server-Cert.pem is not present in /etc/dirsrv/slapd-standalone1/ directory | |||
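The PrivateTmp check in the log above shells out to systemctl; the exact command the test logs ("systemctl show -p PrivateTmp dirsrv@standalone1.service") can be reproduced as:

    import subprocess

    # Unit name is instance-specific; standalone1 matches the logs above.
    out = subprocess.run(
        ["systemctl", "show", "-p", "PrivateTmp", "dirsrv@standalone1.service"],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    assert out == "PrivateTmp=yes", out  # PEM copies then land in a private /tmp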
Passed | suites/tls/tls_cert_namespace_test.py::test_cert_category_authority | 10.45 | |
-------------------------------Captured log call-------------------------------- INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:107 Enable TLS INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:110 Get certificate path INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:116 Check that Self-Signed-CA.pem is present INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:120 Trust the certificate INFO tests.suites.tls.tls_cert_namespace_test:tls_cert_namespace_test.py:123 Search if our certificate has category: authority | |||
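test_cert_category_authority trusts the imported Self-Signed-CA and then confirms it is categorized as an authority. One way to eyeball the equivalent trust flags is certutil against the instance's NSS database; the path matches the setup logs above, and reading the "C" flag as the authority category is an inference, not something the test asserts:

    import subprocess

    # Instance cert directory from the setup logs; adjust per instance.
    out = subprocess.run(
        ["certutil", "-L", "-d", "/etc/dirsrv/slapd-standalone1"],
        capture_output=True, text=True, check=True,
    ).stdout
    for line in out.splitlines():
        if "Self-Signed-CA" in line:
            print(line)  # a trusted CA typically shows flags like "CT,,"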
Passed | suites/tls/tls_check_crl_test.py::test_tls_check_crl | 18.39 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/tls/tls_ldaps_only_test.py::test_tls_ldaps_only | 29.89 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/upgrade/upgrade_repl_plugin_test.py::test_repl_plugin_name_change | 19.43 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | suites/vlv/regression_test.py::test_bulk_import_when_the_backend_with_vlv_was_recreated | 46.21 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3d0a2ee7-a477-4131-bcc2-52e0e7e4d659 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 138d2413-595f-4fa0-a658-71b6aaa6a8bf / got description=3d0a2ee7-a477-4131-bcc2-52e0e7e4d659) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists ------------------------------Captured stdout call------------------------------ deleting vlv search: cn=vlvSrch,cn=userRoot,cn=ldbm database,cn=plugins,cn=config deleting vlv search entry... 
-------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=dc=example,dc=com INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=userRoot,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=userRoot,cn=ldbm database,cn=plugins,cn=config cn: userRoot nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-supplier2/db/userRoot nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=example,dc=com objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="dc=example,dc=com",cn=mapping tree,cn=config cn: dc=example,dc=com nsslapd-backend: userRoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config cn: dc=example,dc=com nsslapd-backend: userRoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2e78804d-7e89-439c-84ed-4d9cd80b6776 / got description=138d2413-595f-4fa0-a658-71b6aaa6a8bf) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 2e78804d-7e89-439c-84ed-4d9cd80b6776 / got description=138d2413-595f-4fa0-a658-71b6aaa6a8bf) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c1864892-925b-47da-892d-b7ebfb5eb12c / got description=2e78804d-7e89-439c-84ed-4d9cd80b6776) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b3102d84-f26a-4be0-8296-9e8061bef60f / got 
description=c1864892-925b-47da-892d-b7ebfb5eb12c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a28f332b-80fa-4ebb-953b-0a5e42016b44 / got description=b3102d84-f26a-4be0-8296-9e8061bef60f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working | |||
Passed | tickets/ticket47560_test.py::test_ticket47560 | 27.09 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:919 fixupMemberOf task fixupmemberof_06052021_011014 for basedn dc=example,dc=com completed successfully | |||
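The fixupMemberOf task logged above is triggered by adding an ephemeral task entry under cn=memberof task,cn=tasks,cn=config; the server runs the fixup and reports completion in the task entry itself. A sketch with hypothetical connection details and task name:

    import time

    import ldap
    import ldap.modlist

    conn = ldap.initialize("ldap://localhost:38901")  # hypothetical URI
    conn.simple_bind_s("cn=Directory Manager", "password")

    task_dn = "cn=fixup_example,cn=memberof task,cn=tasks,cn=config"
    attrs = {
        "objectClass": [b"top", b"extensibleObject"],
        "cn": [b"fixup_example"],
        "basedn": [b"dc=example,dc=com"],  # subtree whose memberOf values to rebuild
    }
    conn.add_s(task_dn, ldap.modlist.addModlist(attrs))

    # The server fills in nsTaskExitCode when the task finishes; the entry
    # is garbage-collected later, so poll with a bound.
    for _ in range(60):
        result = conn.search_s(task_dn, ldap.SCOPE_BASE, attrlist=["nsTaskExitCode"])
        code = result[0][1].get("nsTaskExitCode")
        if code is not None:
            print("fixup finished, exit code", code[0].decode())
            break
        time.sleep(1)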
Passed | tickets/ticket47573_test.py::test_ticket47573_init | 23.83 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 26cb8dba-356f-46f7-8168-0dd54adb5bb0 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:123 test_ticket47573_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7ff99b5ff2e0> (supplier <lib389.DirSrv object at 0x7ff99b7ccdf0>, consumer <lib389.DirSrv object at 0x7ff99b7cc3d0> | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_one | 1.27 | |
-------------------------------Captured log call-------------------------------- DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:147 test_ticket47573_one topology_m1c1 <lib389.topologies.TopologyMain object at 0x7ff99b5ff2e0> (supplier <lib389.DirSrv object at 0x7ff99b7ccdf0>, consumer <lib389.DirSrv object at 0x7ff99b7cc3d0> DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:113 trigger_schema_push: receive 0 (expected 1) DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:161 test_ticket47573_one supplier_schema_csn=b'60bb075a000000000000' DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:162 ctest_ticket47573_one onsumer_schema_csn=b'60bb075a000000000000' DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:47 _pattern_errorlog: start at offset 0 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [36] 389-Directory/2.0.5 B2021.156.0143 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [95] localhost.localdomain:39001 (/etc/dirsrv/slapd-supplier1) DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [96] DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [198] [05/Jun/2021:01:10:28.502736618 -0400] - INFO - main - 389-Directory/2.0.5 B2021.156.0143 starting up DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [306] [05/Jun/2021:01:10:28.505764551 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [398] [05/Jun/2021:01:10:28.511448579 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [490] [05/Jun/2021:01:10:28.514948216 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [598] [05/Jun/2021:01:10:29.307090710 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [712] [05/Jun/2021:01:10:29.313654989 -0400] - INFO - bdb_config_upgrade_dse_info - create config entry from old config DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [814] [05/Jun/2021:01:10:29.321742151 -0400] - NOTICE - bdb_start_autotune - found 7977308k physical memory DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [910] [05/Jun/2021:01:10:29.325680569 -0400] - NOTICE - bdb_start_autotune - found 7297672k available DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1017] [05/Jun/2021:01:10:29.328521609 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498581k DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1120] [05/Jun/2021:01:10:29.331513629 -0400] - NOTICE - bdb_start_autotune - total cache size: 408438169 B; DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1256] [05/Jun/2021:01:10:29.574270338 -0400] - INFO - slapd_daemon - slapd started. 
Listening on All Interfaces port 39001 for LDAP requests DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1383] [05/Jun/2021:01:10:29.578840887 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-supplier1.socket for LDAPI requests DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1542] [05/Jun/2021:01:10:29.846339749 -0400] - INFO - postop_modify_config_dse - The change of nsslapd-securePort will not take effect until the server is restarted DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1659] [05/Jun/2021:01:10:29.861811994 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1837] [05/Jun/2021:01:10:30.328470708 -0400] - INFO - op_thread_cleanup - slapd shutting down - signaling operation threads - op stack size 2 max work q size 1 max work q stack size 1 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [1967] [05/Jun/2021:01:10:30.334163925 -0400] - INFO - slapd_daemon - slapd shutting down - closing down internal subsystems and plugins DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2070] [05/Jun/2021:01:10:30.341064550 -0400] - INFO - bdb_pre_close - Waiting for 5 database threads to stop DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2167] [05/Jun/2021:01:10:32.031656555 -0400] - INFO - bdb_pre_close - All database threads now stopped DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2278] [05/Jun/2021:01:10:32.058170528 -0400] - INFO - ldbm_back_instance_set_destructor - Set of instances destroyed DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2439] [05/Jun/2021:01:10:32.062049077 -0400] - INFO - connection_post_shutdown_cleanup - slapd shutting down - freed 1 work q stack objects - freed 2 op stack objects DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2509] [05/Jun/2021:01:10:32.065544934 -0400] - INFO - main - slapd stopped. 
DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2611] [05/Jun/2021:01:10:33.345395513 -0400] - INFO - main - 389-Directory/2.0.5 B2021.156.0143 starting up DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2719] [05/Jun/2021:01:10:33.351366006 -0400] - INFO - main - Setting the maximum file descriptor limit to: 524288 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2811] [05/Jun/2021:01:10:33.358648265 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [2903] [05/Jun/2021:01:10:33.363030772 -0400] - ERR - allow_operation - Component identity is NULL DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3011] [05/Jun/2021:01:10:34.144250623 -0400] - INFO - PBKDF2_SHA256 - Based on CPU performance, chose 2048 rounds DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3128] [05/Jun/2021:01:10:34.149871303 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3230] [05/Jun/2021:01:10:34.156409292 -0400] - NOTICE - bdb_start_autotune - found 7977308k physical memory DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3326] [05/Jun/2021:01:10:34.159827116 -0400] - NOTICE - bdb_start_autotune - found 7297440k available DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3433] [05/Jun/2021:01:10:34.163156170 -0400] - NOTICE - bdb_start_autotune - cache autosizing: db cache: 498581k DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3563] [05/Jun/2021:01:10:34.166665036 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot entry cache (1 total): 1376256k DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3689] [05/Jun/2021:01:10:34.170462848 -0400] - NOTICE - bdb_start_autotune - cache autosizing: userRoot dn cache (1 total): 196608k DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3793] [05/Jun/2021:01:10:34.173802408 -0400] - NOTICE - bdb_start_autotune - total cache size: 1834501529 B; DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [3929] [05/Jun/2021:01:10:34.272772171 -0400] - INFO - slapd_daemon - slapd started. Listening on All Interfaces port 39001 for LDAP requests DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4056] [05/Jun/2021:01:10:34.277324888 -0400] - INFO - slapd_daemon - Listening on /var/run/slapd-supplier1.socket for LDAPI requests DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4244] [05/Jun/2021:01:10:42.305569591 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4424] [05/Jun/2021:01:10:42.308674628 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. 
Result 0 (Success) DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4612] [05/Jun/2021:01:10:42.311209542 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding the replication changelog RUV, this may take several minutes... DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4792] [05/Jun/2021:01:10:42.314099441 -0400] - NOTICE - NSMMReplicationPlugin - changelog program - _cl5ConstructRUV - Rebuilding replication changelog RUV complete. Result 0 (Success) DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [4962] [05/Jun/2021:01:10:43.036014982 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Beginning total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-64:39201)". DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5146] [05/Jun/2021:01:10:43.043807898 -0400] - NOTICE - NSMMReplicationPlugin - replica_subentry_check - Need to create replication keep alive entry <cn=repl keep alive 1,dc=example,dc=com> DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5291] [05/Jun/2021:01:10:43.046774781 -0400] - INFO - NSMMReplicationPlugin - replica_subentry_create - add dn: cn=repl keep alive 1,dc=example,dc=com DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5308] objectclass: top DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5334] objectclass: ldapsubentry DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5364] objectclass: extensibleObject DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5386] cn: repl keep alive 1 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5572] [05/Jun/2021:01:10:45.619501925 -0400] - INFO - NSMMReplicationPlugin - repl5_tot_run - Finished total update of replica "agmt="cn=temp_201" (ci-vm-10-0-139-64:39201)". Sent 16 entries. DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5572] DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:59 _pattern_errorlog: end at offset 5572 | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_two | 1.33 | |
-------------------------------Captured log call-------------------------------- DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:113 trigger_schema_push: receive b'1' (expected 2) DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:192 test_ticket47573_two supplier_schema_csn=b'60bb075b000000000000' DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:193 test_ticket47573_two consumer_schema_csn=b'60bb075b000000000000' DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:47 _pattern_errorlog: start at offset 5573 DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:54 _pattern_errorlog: [5573] DEBUG tests.tickets.ticket47573_test:ticket47573_test.py:59 _pattern_errorlog: end at offset 5573 | |||
Passed | tickets/ticket47573_test.py::test_ticket47573_three | 4.35 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47573_test:ticket47573_test.py:228 Testcase PASSED | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_init | 30.04 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect cb63bed2-c476-4e8d-aec0-b3d747a9904d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47619_test.py:48 test_ticket47619_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7ff99b856dc0> INFO lib389:ticket47619_test.py:61 test_ticket47619_init: 100 entries ADDed other_entry[0..99] | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_create_index | 3.55 | |
-------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog INFO lib389:backend.py:80 List backend with suffix=cn=changelog | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_reindex | 16.67 | |
-------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:837 Index task index_attrs_06052021_011131 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011133 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011135 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011137 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011139 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011141 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011143 completed successfully INFO lib389:tasks.py:837 Index task index_attrs_06052021_011145 completed successfully | |||
Passed | tickets/ticket47619_test.py::test_ticket47619_check_indexed_search | 2.83 | |
No log output captured. | |||
Passed | tickets/ticket47640_test.py::test_ticket47640 | 10.30 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47640_test:ticket47640_test.py:65 Add operation correctly rejected. INFO tests.tickets.ticket47640_test:ticket47640_test.py:75 Test complete | |||
Passed | tickets/ticket47653MMR_test.py::test_ticket47653_init | 25.63 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect a1d3ef14-eb41-41e0-9923-07fa7284ee90 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect bf9f799b-84d3-4dfa-a963-0b40be181de0 / got description=a1d3ef14-eb41-41e0-9923-07fa7284ee90) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47653MMR_test.py:72 Add OCticket47653 that allows 'member' attribute INFO lib389:ticket47653MMR_test.py:77 Add cn=bind_entry, dc=example,dc=com | |||
Passed | tickets/ticket47653MMR_test.py::test_ticket47653_add | 5.27 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47653MMR_test.py:114 ######################### ADD ###################### INFO lib389:ticket47653MMR_test.py:117 Bind as cn=bind_entry, dc=example,dc=com INFO lib389:ticket47653MMR_test.py:147 Try to add Add cn=test_entry, dc=example,dc=com (aci is missing): dn: cn=test_entry, dc=example,dc=com cn: test_entry member: cn=bind_entry, dc=example,dc=com objectclass: top objectclass: person objectclass: OCticket47653 postalAddress: here postalCode: 1234 sn: test_entry INFO lib389:ticket47653MMR_test.py:151 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:ticket47653MMR_test.py:155 Bind as cn=Directory Manager and add the ADD SELFDN aci INFO lib389:ticket47653MMR_test.py:168 Bind as cn=bind_entry, dc=example,dc=com INFO lib389:ticket47653MMR_test.py:173 Try to add Add cn=test_entry, dc=example,dc=com (member is missing) INFO lib389:ticket47653MMR_test.py:181 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:ticket47653MMR_test.py:188 Try to add Add cn=test_entry, dc=example,dc=com (with several member values) INFO lib389:ticket47653MMR_test.py:191 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:ticket47653MMR_test.py:195 Try to add Add cn=test_entry, dc=example,dc=com should be successful INFO lib389:ticket47653MMR_test.py:206 Try to retrieve cn=test_entry, dc=example,dc=com from Supplier2 INFO lib389:ticket47653MMR_test.py:218 Update cn=test_entry, dc=example,dc=com on M2 | |||
Passed | tickets/ticket47653MMR_test.py::test_ticket47653_modify | 9.82 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47653MMR_test.py:248 Bind as cn=bind_entry, dc=example,dc=com INFO lib389:ticket47653MMR_test.py:251 ######################### MODIFY ###################### INFO lib389:ticket47653MMR_test.py:255 Try to modify cn=test_entry, dc=example,dc=com (aci is missing) INFO lib389:ticket47653MMR_test.py:259 Exception (expected): INSUFFICIENT_ACCESS INFO lib389:ticket47653MMR_test.py:263 Bind as cn=Directory Manager and add the WRITE SELFDN aci INFO lib389:ticket47653MMR_test.py:277 M1: Bind as cn=bind_entry, dc=example,dc=com INFO lib389:ticket47653MMR_test.py:282 M1: Try to modify cn=test_entry, dc=example,dc=com. It should succeeds INFO lib389:ticket47653MMR_test.py:286 M1: Bind as cn=Directory Manager INFO lib389:ticket47653MMR_test.py:289 M1: Check the update of cn=test_entry, dc=example,dc=com INFO lib389:ticket47653MMR_test.py:295 M2: Bind as cn=Directory Manager INFO lib389:ticket47653MMR_test.py:297 M2: Try to retrieve cn=test_entry, dc=example,dc=com INFO lib389:ticket47653MMR_test.py:311 M2: Update cn=test_entry, dc=example,dc=com (bound as cn=bind_entry, dc=example,dc=com) INFO lib389:ticket47653MMR_test.py:329 M1: Bind as cn=Directory Manager INFO lib389:ticket47653MMR_test.py:331 M1: Check cn=test_entry, dc=example,dc=com.postalCode=1929) | |||
Passed | tickets/ticket47676_test.py::test_ticket47676_init | 25.63 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 60471be6-d9e3-482b-a2c3-c926524dd5d8 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c69a5b87-2445-4b56-b9a7-4e5d2a3383f4 / got description=60471be6-d9e3-482b-a2c3-c926524dd5d8) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47676_test.py:88 Add OCticket47676 that allows 'member' attribute INFO lib389:ticket47676_test.py:93 Add cn=bind_entry, dc=example,dc=com | |||
Passed | tickets/ticket47676_test.py::test_ticket47676_skip_oc_at | 3.46 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47676_test.py:120 ######################### ADD ###################### INFO lib389:ticket47676_test.py:123 Bind as cn=Directory Manager and add the entry with a specific oc INFO lib389:ticket47676_test.py:140 Try to add cn=test_entry, dc=example,dc=com (should be successful) INFO lib389:ticket47676_test.py:147 Try to retrieve cn=test_entry, dc=example,dc=com from Supplier2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 1ba3986a-51ca-4a3b-9710-76073bda927d / got description=c69a5b87-2445-4b56-b9a7-4e5d2a3383f4) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389:ticket47676_test.py:152 Update cn=test_entry, dc=example,dc=com on M2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f6f649c4-3f21-4657-bd56-c0c0777ddeb2 / got description=1ba3986a-51ca-4a3b-9710-76073bda927d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | tickets/ticket47676_test.py::test_ticket47676_reject_action | 14.40 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47676_test.py:163 ######################### REJECT ACTION ###################### INFO lib389:ticket47676_test.py:177 Add OC2ticket47676 on M1 INFO lib389:ticket47676_test.py:182 Check OC2ticket47676 is in M1 INFO lib389:ticket47676_test.py:193 Update cn=test_entry, dc=example,dc=com on M1 INFO lib389:ticket47676_test.py:198 Check updated cn=test_entry, dc=example,dc=com on M2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 958094cc-79d0-4092-8a84-ebbe9faea907 / got description=f6f649c4-3f21-4657-bd56-c0c0777ddeb2) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389:ticket47676_test.py:205 Check OC2ticket47676 is not in M2 INFO lib389:ticket47676_test.py:215 ######################### NO MORE REJECT ACTION ###################### INFO lib389:ticket47676_test.py:226 Update cn=test_entry, dc=example,dc=com on M1 INFO lib389:ticket47676_test.py:231 Check updated cn=test_entry, dc=example,dc=com on M2 INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 9d4bc9b9-f66c-4ea4-ab53-50325c2d3725 / got description=958094cc-79d0-4092-8a84-ebbe9faea907) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389:ticket47676_test.py:237 Check OC2ticket47676 is in M2 | |||
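All three ticket47676 cases turn on custom schema being accepted locally and then pushed by replication. A rough python-ldap equivalent of the schema ADD step, assuming a supplier at localhost:39001; the OID and definition text are illustrative:

    import ldap

    conn = ldap.initialize("ldap://localhost:39001")          # assumed URL
    conn.simple_bind_s("cn=Directory Manager", "password")    # assumed credentials

    # Adding a definition to cn=schema bumps the schema CSN; the next
    # replication session then pushes the updated schema to the peer.
    oc = (b"( OCticket47676-oid NAME 'OCticket47676' SUP top AUXILIARY "
          b"MAY ( member ) X-ORIGIN 'user defined' )")
    conn.modify_s("cn=schema", [(ldap.MOD_ADD, "objectClasses", [oc])])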
Passed | tickets/ticket47714_test.py::test_ticket47714_init | 7.95 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket47714_test.py:38 ############################################### INFO lib389:ticket47714_test.py:39 ####### INFO lib389:ticket47714_test.py:40 ####### Testing Ticket 47714 - [RFE] Update lastLoginTime also in Account Policy plugin if account lockout is based on passwordExpirationTime. INFO lib389:ticket47714_test.py:41 ####### INFO lib389:ticket47714_test.py:42 ############################################### INFO lib389.utils:ticket47714_test.py:55 ######################### Adding Account Policy entry: cn=Account Inactivation Policy,dc=example,dc=com ###################### INFO lib389.utils:ticket47714_test.py:60 ######################### Adding Test User entry: uid=ticket47714user,dc=example,dc=com ###################### | |||
Passed | tickets/ticket47714_test.py::test_ticket47714_run_0 | 10.78 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47714_test.py:38 ############################################### INFO lib389:ticket47714_test.py:39 ####### INFO lib389:ticket47714_test.py:40 ####### Account Policy - No new attr alwaysRecordLoginAttr in config INFO lib389:ticket47714_test.py:41 ####### INFO lib389:ticket47714_test.py:42 ############################################### INFO lib389.utils:ticket47714_test.py:96 ######################### Bind as uid=ticket47714user,dc=example,dc=com ###################### INFO lib389.utils:ticket47714_test.py:109 ######################### Bind as uid=ticket47714user,dc=example,dc=com again ###################### INFO lib389.utils:ticket47714_test.py:122 First lastLoginTime: b'20210605051337Z', Second lastLoginTime: b'20210605051339Z' INFO lib389.utils:ticket47714_test.py:133 ######################### cn=config,cn=Account Policy Plugin,cn=plugins,cn=config ###################### INFO lib389.utils:ticket47714_test.py:134 accountInactivityLimit: b'1' INFO lib389.utils:ticket47714_test.py:135 ######################### cn=config,cn=Account Policy Plugin,cn=plugins,cn=config DONE ###################### INFO lib389.utils:ticket47714_test.py:137 ######################### Bind as uid=ticket47714user,dc=example,dc=com again to fail ###################### INFO lib389.utils:ticket47714_test.py:141 CONSTRAINT VIOLATION Constraint violation INFO lib389.utils:ticket47714_test.py:142 uid=ticket47714user,dc=example,dc=com was successfully inactivated. | |||
Passed | tickets/ticket47714_test.py::test_ticket47714_run_1 | 8.20 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47714_test.py:38 ############################################### INFO lib389:ticket47714_test.py:39 ####### INFO lib389:ticket47714_test.py:40 ####### Account Policy - With new attr alwaysRecordLoginAttr in config INFO lib389:ticket47714_test.py:41 ####### INFO lib389:ticket47714_test.py:42 ############################################### INFO lib389.utils:ticket47714_test.py:179 ######################### Bind as uid=ticket47714user,dc=example,dc=com ###################### INFO lib389.utils:ticket47714_test.py:191 ######################### Bind as uid=ticket47714user,dc=example,dc=com again ###################### INFO lib389.utils:ticket47714_test.py:203 First lastLoginTime: b'20210605051346Z', Second lastLoginTime: b'20210605051348Z' INFO lib389:ticket47714_test.py:206 ticket47714 was successfully verified. | |||
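run_0 above exercises the pre-RFE behaviour (the third bind is rejected once accountInactivityLimit is exceeded), while run_1 verifies the new alwaysRecordLoginAttr setting. A hedged sketch of the plugin shared-config change behind run_1, assuming the standalone instance at localhost:38901; the attribute names follow the Account Policy plugin configuration, but the values here are assumptions, not the test's exact config:

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")          # assumed URL
    conn.simple_bind_s("cn=Directory Manager", "password")    # assumed credentials

    dn = "cn=config,cn=Account Policy Plugin,cn=plugins,cn=config"
    conn.modify_s(dn, [
        (ldap.MOD_REPLACE, "alwaysrecordlogin", [b"yes"]),
        # Lockout is driven by this state attribute (illustrative value) ...
        (ldap.MOD_REPLACE, "stateattrname", [b"passwordExpirationTime"]),
        # ... while the RFE's new attribute keeps lastLoginTime updated on
        # every successful bind regardless of the state attribute.
        (ldap.MOD_REPLACE, "alwaysRecordLoginAttr", [b"lastLoginTime"]),
    ])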
Passed | tickets/ticket47721_test.py::test_ticket47721_init | 25.58 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 64cbe791-8901-427f-8a80-6e88bbd65813 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 9a5aad78-ad7a-4caf-bff7-2524e659fb4a / got description=64cbe791-8901-427f-8a80-6e88bbd65813) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47721_test.py:98 Add cn=bind_entry, dc=example,dc=com | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_0 | 1.21 | |
-------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 66b60bcc-630a-49e5-ae21-e0dfdff3a17b / got description=9a5aad78-ad7a-4caf-bff7-2524e659fb4a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_1 | 3.57 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47721_test:ticket47721_test.py:127 Running test 1... INFO lib389:ticket47721_test.py:132 Add (M2) b"( ATticket47721-oid NAME 'ATticket47721' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" INFO lib389:ticket47721_test.py:136 Chg (M2) b"( 2.16.840.1.113730.3.1.569 NAME 'cosPriority' DESC 'Netscape defined attribute type' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-ORIGIN 'Netscape Directory Server' )" INFO lib389:ticket47721_test.py:140 Add (M2) b"( OCticket47721-oid NAME 'OCticket47721' DESC 'A group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" INFO lib389:ticket47721_test.py:144 Chg (M2) b"( 5.3.6.1.1.1.2.0 NAME 'trustAccount' DESC 'Sets trust accounts information' SUP top AUXILIARY MUST trustModel MAY ( accessTo $ ou ) X-ORIGIN 'nss_ldap/pam_ldap' )" INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 65cca69d-9cfd-4bc9-a414-cd695ddb2b2c / got description=66b60bcc-630a-49e5-ae21-e0dfdff3a17b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:158 Supplier 1 schemaCSN: b'60bb082b000000000000' DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:159 Supplier 2 schemaCSN: b'60bb082b000000000000' | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_2 | 3.32 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47721_test:ticket47721_test.py:163 Running test 2... INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ff1455b3-f8cd-4f8f-b143-e91c6673ee8a / got description=65cca69d-9cfd-4bc9-a414-cd695ddb2b2c) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:176 Supplier 1 schemaCSN: b'60bb082b000000000000' DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:177 Supplier 2 schemaCSN: b'60bb082b000000000000' | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_3 | 13.63 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47721_test:ticket47721_test.py:195 Running test 3... INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:ticket47721_test.py:203 Update schema (M2) b"( ATtest3-oid NAME 'ATtest3' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" INFO lib389:ticket47721_test.py:208 Update schema (M2) b"( OCtest3-oid NAME 'OCtest3' DESC 'A group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0e4e36fe-21af-4711-8221-0842fa5b9a64 / got description=ff1455b3-f8cd-4f8f-b143-e91c6673ee8a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:223 Supplier 1 schemaCSN: b'60bb082b000000000000' DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:224 Supplier 2 schemaCSN: b'60bb0837000000000000' | |||
Passed | tickets/ticket47721_test.py::test_ticket47721_4 | 6.42 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47721_test:ticket47721_test.py:245 Running test 4... INFO lib389:ticket47721_test.py:248 Update schema (M1) b"( ATtest4-oid NAME 'ATtest4' DESC 'test AT ticket 47721' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15 X-ORIGIN ( 'Test 47721' 'user defined' ) )" INFO lib389:ticket47721_test.py:252 Update schema (M1) b"( OCtest4-oid NAME 'OCtest4' DESC 'A group of related automount objects' SUP top STRUCTURAL MUST ou X-ORIGIN 'draft-howard-rfc2307bis' )" INFO lib389:ticket47721_test.py:255 trigger replication M1->M2: to update the schema INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect ba4d21db-5a93-43e2-b13d-c877902b9e1b / got description=0e4e36fe-21af-4711-8221-0842fa5b9a64) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389:ticket47721_test.py:264 trigger replication M1->M2: to push the schema INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 31c16da2-90f0-466e-8543-e09f732fe278 / got description=ba4d21db-5a93-43e2-b13d-c877902b9e1b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:276 Supplier 1 schemaCSN: b'60bb083f000000000000' DEBUG tests.tickets.ticket47721_test:ticket47721_test.py:277 Supplier 2 schemaCSN: b'60bb083f000000000000' | |||
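The schemaCSN values logged by these four tests are what decides whether a replication session needs to push the schema. A small python-ldap helper to read it, assuming Directory Manager credentials and the test's port layout; nsSchemaCSN is the operational attribute shown in the DEBUG lines above:

    import ldap

    def schema_csn(url):
        """Read the schema CSN that the tests compare between suppliers."""
        conn = ldap.initialize(url)
        conn.simple_bind_s("cn=Directory Manager", "password")  # assumed credentials
        res = conn.search_s("cn=schema", ldap.SCOPE_BASE,
                            "(objectclass=*)", ["nsSchemaCSN"])
        return res[0][1]["nsSchemaCSN"][0]

    # Test 4 passes when both suppliers converge, e.g. b'60bb083f000000000000'.
    print(schema_csn("ldap://localhost:39001"),   # assumed URLs
          schema_csn("ldap://localhost:39002"))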
Passed | tickets/ticket47787_test.py::test_ticket47787_init | 27.13 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0c7e645e-9008-4772-919b-413f102a9256 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 62ea74f4-614a-4ca0-a792-1dbdd1bd531b / got description=0c7e645e-9008-4772-919b-413f102a9256) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47787_test.py:294 ######################### INITIALIZATION ###################### INFO lib389:ticket47787_test.py:297 Add cn=bind_entry,dc=example,dc=com INFO lib389:ticket47787_test.py:305 Add cn=staged user,dc=example,dc=com INFO lib389:ticket47787_test.py:312 Add cn=accounts,dc=example,dc=com | |||
Passed | tickets/ticket47787_test.py::test_ticket47787_2 | 21.19 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47787_test.py:69 ############################################### INFO lib389:ticket47787_test.py:70 ####### INFO lib389:ticket47787_test.py:71 ####### test_ticket47787_2 INFO lib389:ticket47787_test.py:72 ####### INFO lib389:ticket47787_test.py:73 ############################################### INFO lib389:ticket47787_test.py:59 Bind as cn=Directory Manager INFO lib389:ticket47787_test.py:59 Bind as cn=Directory Manager INFO lib389:ticket47787_test.py:159 ######################### Pause RA M1<->M2 ###################### INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:ticket47787_test.py:209 ######################### MOD cn=new_account18,cn=staged user,dc=example,dc=com (M2) ###################### INFO lib389:ticket47787_test.py:200 ######################### DELETE new_account1 (M1) ###################### INFO lib389:ticket47787_test.py:209 ######################### MOD cn=new_account18,cn=staged user,dc=example,dc=com (M2) ###################### INFO lib389:ticket47787_test.py:209 ######################### MOD cn=new_account19,cn=staged user,dc=example,dc=com (M2) ###################### INFO lib389:ticket47787_test.py:209 ######################### MOD cn=new_account1,cn=staged user,dc=example,dc=com (M2) ###################### INFO lib389:ticket47787_test.py:209 ######################### MOD cn=new_account19,cn=staged user,dc=example,dc=com (M2) ###################### INFO lib389:ticket47787_test.py:170 ######################### resume RA M1<->M2 ###################### INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:ticket47787_test.py:388 ######################### Check DEL replicated on M2 ###################### INFO lib389:ticket47787_test.py:79 ######################### Tombstone on M1 ###################### INFO lib389:ticket47787_test.py:92 ######################### Tombstone on M2 ###################### INFO lib389:ticket47787_test.py:96 ######################### Description ###################### DEL M1 - MOD M2 INFO lib389:ticket47787_test.py:97 M1 only INFO lib389:ticket47787_test.py:108 M2 only INFO lib389:ticket47787_test.py:119 M1 differs from M2 INFO lib389:ticket47787_test.py:409 ######################### Check MOD replicated on M1 ###################### | |||
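The "Tombstone on M1/M2" steps rely on deleted entries surviving as nsTombstone entries, which ordinary searches do not return. A hedged sketch of such a check with python-ldap; the suffix and account name come from the log above, while the URL, credentials, and attribute list are assumptions:

    import ldap

    conn = ldap.initialize("ldap://localhost:39001")          # assumed URL
    conn.simple_bind_s("cn=Directory Manager", "password")    # assumed credentials

    # Tombstones only show up when the filter names nsTombstone explicitly.
    tombstones = conn.search_s(
        "dc=example,dc=com", ldap.SCOPE_SUBTREE,
        "(&(objectClass=nsTombstone)(cn=new_account1))",
        ["nscpEntryDN", "nsUniqueId"])
    assert tombstones, "the DEL on M1 should leave a replicated tombstone"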
Passed | tickets/ticket47808_test.py::test_ticket47808_run | 13.93 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket47808_test.py:36 Bind as cn=Directory Manager INFO lib389:ticket47808_test.py:39 ######################### SETUP ATTR UNIQ PLUGIN ###################### INFO lib389:ticket47808_test.py:46 ######################### ADD USER 1 ###################### INFO lib389:ticket47808_test.py:55 Try to add dn: cn=test_entry 1, dc=example,dc=com cn: test_entry 1 objectclass: top objectclass: person sn: test_entry INFO lib389:ticket47808_test.py:58 ######################### Restart Server ###################### INFO lib389:ticket47808_test.py:62 ######################### ADD USER 2 ###################### INFO lib389:ticket47808_test.py:71 Try to add dn: cn=test_entry 2, dc=example,dc=com cn: test_entry 2 objectclass: top objectclass: person sn: test_entry WARNING lib389:ticket47808_test.py:75 Adding cn=test_entry 2, dc=example,dc=com failed INFO lib389:ticket47808_test.py:78 ######################### IS SERVER UP? ###################### INFO lib389:ticket47808_test.py:81 Yes, it's up. INFO lib389:ticket47808_test.py:83 ######################### CHECK USER 2 NOT ADDED ###################### INFO lib389:ticket47808_test.py:84 Try to search cn=test_entry 2, dc=example,dc=com INFO lib389:ticket47808_test.py:88 Found none INFO lib389:ticket47808_test.py:90 ######################### DELETE USER 1 ###################### INFO lib389:ticket47808_test.py:92 Try to delete cn=test_entry 1, dc=example,dc=com | |||
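Ticket 47808 was a crash: the Attribute Uniqueness plugin tripped over an add rejected after a restart. The test mirrors that sequence: enable the plugin, restart, add a duplicate, then confirm the server is still up. A sketch of the plugin activation with the old argN-style configuration, assuming the default plugin entry; python-ldap, values illustrative:

    import ldap

    conn = ldap.initialize("ldap://localhost:38901")          # assumed URL
    conn.simple_bind_s("cn=Directory Manager", "password")    # assumed credentials

    dn = "cn=attribute uniqueness,cn=plugins,cn=config"
    conn.modify_s(dn, [
        (ldap.MOD_REPLACE, "nsslapd-pluginEnabled", [b"on"]),
        # Old positional arguments: the attribute to keep unique + subtree.
        (ldap.MOD_REPLACE, "nsslapd-pluginarg0", [b"sn"]),
        (ldap.MOD_REPLACE, "nsslapd-pluginarg1", [b"dc=example,dc=com"]),
    ])
    # After a restart, adding a second entry with the same sn should fail
    # with CONSTRAINT_VIOLATION; the fixed server no longer crashes on it.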
Passed | tickets/ticket47823_test.py::test_ticket47823_init | 20.01 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_one_container_add | 18.24 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (ADD) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:140 Uniqueness not enforced: create the entries INFO lib389:ticket47823_test.py:155 Uniqueness enforced: checks second entry is rejected INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:140 Uniqueness not enforced: create the entries INFO lib389:ticket47823_test.py:155 Uniqueness enforced: checks second entry is rejected | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_one_container_mod | 9.06 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MOD) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:193 Uniqueness enforced: checks MOD ADD entry is rejected INFO lib389:ticket47823_test.py:210 Uniqueness enforced: checks MOD REPLACE entry is rejected INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:193 Uniqueness enforced: checks MOD ADD entry is rejected INFO lib389:ticket47823_test.py:210 Uniqueness enforced: checks MOD REPLACE entry is rejected | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_one_container_modrdn | 9.24 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:237 Uniqueness enforced: checks MODRDN entry is rejected INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MODRDN) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:237 Uniqueness enforced: checks MODRDN entry is rejected | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_multi_containers_add | 9.25 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (ADD) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_multi_containers_mod | 9.25 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MOD) on separated container INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:525 Uniqueness not enforced: if same 'cn' modified (add/replace) on separated containers INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) on separated container INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:531 Uniqueness not enforced: if same 'cn' modified (add/replace) on separated containers | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_multi_containers_modrdn | 9.81 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With former config (args), check attribute uniqueness with 'cn' (MODRDN) on separated containers INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### INFO lib389:ticket47823_test.py:545 Uniqueness not enforced: checks MODRDN entry is accepted on separated containers INFO lib389:ticket47823_test.py:548 Uniqueness not enforced: checks MODRDN entry is accepted on separated containers | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_add | 4.45 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (ADD) across several containers INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_mod | 4.90 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MOD) across several containers INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_modrdn | 4.36 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### With new config (args), check attribute uniqueness with 'cn' (MODRDN) across several containers INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_1 | 9.51 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (old): arg0 is missing INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_2 | 9.14 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (old): arg1 is missing INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_3 | 9.97 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (old): arg0 is missing but new config attrname exists INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_4 | 9.13 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (old): arg1 is missing but new config exists INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_5 | 9.13 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-attribute-name is missing INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_6 | 9.06 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-subtrees is missing INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
Passed | tickets/ticket47823_test.py::test_ticket47823_invalid_config_7 | 12.38 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47823_test.py:58 ############################################### INFO lib389:ticket47823_test.py:59 ####### INFO lib389:ticket47823_test.py:60 ####### Invalid config (new): uniqueness-subtrees are invalid INFO lib389:ticket47823_test.py:61 ####### INFO lib389:ticket47823_test.py:62 ############################################### | |||
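The "former config (args)" and "new config" halves of every ticket47823 case refer to the two configuration styles of the Attribute Uniqueness plugin, and the invalid_config cases then drop one required piece at a time. A side-by-side sketch of the two styles as python-ldap modlists; the subtree DNs are illustrative:

    import ldap

    # Former, positional style: nsslapd-pluginargN on the plugin entry.
    old_style = [
        (ldap.MOD_REPLACE, "nsslapd-pluginarg0", [b"cn"]),
        (ldap.MOD_REPLACE, "nsslapd-pluginarg1", [b"cn=accounts,dc=example,dc=com"]),
    ]

    # New, named style: one attribute name, one or more subtrees, and an
    # optional switch that enforces uniqueness across all of them (the
    # "across several containers" cases above).
    new_style = [
        (ldap.MOD_REPLACE, "uniqueness-attribute-name", [b"cn"]),
        (ldap.MOD_REPLACE, "uniqueness-subtrees",
         [b"cn=accounts,dc=example,dc=com", b"cn=provisioning,dc=example,dc=com"]),
        (ldap.MOD_REPLACE, "uniqueness-across-all-subtrees", [b"on"]),
    ]
    # Either list would be applied to the plugin entry with modify_s().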
Passed | tickets/ticket47828_test.py::test_ticket47828_init | 12.58 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_0 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### NO exclude scope: Add an active entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_1 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### NO exclude scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_2 | 0.49 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### NO exclude scope: Add a staged entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_3 | 0.33 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### NO exclude scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_4 | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Exclude the provisioning container INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_5 | 0.25 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_6 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_7 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_8 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_9 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_10 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_11 | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Exclude (in addition) the dummy container INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_12 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_13 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_14 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_15 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_16 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is not set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_17 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning/Dummy excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_18 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Exclude PROVISIONING and a wrong container INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_19 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_20 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_21 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_22 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_23 | 0.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_24 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Provisioning+wrong container excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_25 | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Exclude a wrong container INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_26 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_27 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add an active entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_28 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is not set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_29 | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a staged entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_30 | 0.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is set INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
Passed | tickets/ticket47828_test.py::test_ticket47828_run_31 | 1.35 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47828_test.py:42 ############################################### INFO lib389:ticket47828_test.py:43 ####### INFO lib389:ticket47828_test.py:44 ####### Wrong container excluded scope: Add a dummy entry and check its ALLOCATED_ATTR is unchanged (!= magic) INFO lib389:ticket47828_test.py:45 ####### INFO lib389:ticket47828_test.py:46 ############################################### | |||
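All thirty-two runs iterate one DNA plugin setup: an entry created with the 'magic' value gets ALLOCATED_ATTR generated when it falls inside the managed scope, and dnaExcludeScope (the attribute this ticket added) carves containers such as provisioning out of that scope, so staged and dummy entries keep the magic value. A hedged sketch of such a DNA configuration entry; the config DN, ranges, and attribute choice are assumptions:

    import ldap
    import ldap.modlist as modlist

    conn = ldap.initialize("ldap://localhost:38901")          # assumed URL
    conn.simple_bind_s("cn=Directory Manager", "password")    # assumed credentials

    dn = ("cn=dna config,cn=Distributed Numeric Assignment Plugin,"
          "cn=plugins,cn=config")
    entry = {
        "objectClass": [b"top", b"extensibleObject"],
        "cn": [b"dna config"],
        "dnaType": [b"uidNumber"],      # plays the ALLOCATED_ATTR role
        "dnaMagicRegen": [b"-1"],       # the 'magic' value in the log lines
        "dnaFilter": [b"(objectclass=posixAccount)"],
        "dnaScope": [b"dc=example,dc=com"],
        "dnaNextValue": [b"1000"],
        # New with ticket 47828: entries under an excluded subtree keep the
        # magic value instead of being assigned a number.
        "dnaExcludeScope": [b"cn=provisioning,dc=example,dc=com"],
    }
    conn.add_s(dn, modlist.addModlist(entry))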
Passed | tickets/ticket47829_test.py::test_ticket47829_init | 12.47 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_1 | 2.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add an active user to an active group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_2 | 2.36 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add an Active user to a Stage group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_3 | 2.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add an Active user to an out of scope group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_1 | 2.09 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add a Stage user to an Active group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_2 | 2.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add a Stage user to a Stage group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_3 | 2.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add a Stage user to an out of scope group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_out_user_1 | 2.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to an active group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_out_user_2 | 2.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to a Stage group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=stage group,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_out_user_3 | 2.08 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### MOD: add an out of scope user to an out of scope group INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=out guy,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=out group,cn=out,dc=example,dc=com: member ->b'cn=out guy,cn=out,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=out guy,cn=out,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=out group,cn=out,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_active_user_1 | 2.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active user to an Active group. Then move Active user to Active INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=xactive guy ###################### INFO lib389:ticket47829_test.py:96 !!!!!!! cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_stage_user_1 | 1.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active user to an Active group. Then move Active user to Stage INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_out_user_1 | 1.11 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active user to an Active group. Then move Active user to out of scope INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_modrdn_1 | 1.10 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add a Stage user to an Active group. Then move Stage user to Active INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_active_user_1 | 1.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add a Stage user to an Active group. Then move Stage user to Active INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add a Stage user to an Active group. Then move Stage user to Stage INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### Return because it requires a fix for 47833 INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_1 | 2.12 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1 INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:172 delete entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_2 | 1.15 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1. Then move active user to stage INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_3 | 1.11 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add active user to G1. Then move active user to out of scope INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=active guy,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=active guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=active guy ###################### | |||
Passed | tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_4 | 2.36 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47829_test.py:62 ############################################### INFO lib389:ticket47829_test.py:63 ####### INFO lib389:ticket47829_test.py:64 ####### add an Active group (G1) to an active group (G0). Then add stage user to G1. Then move user to active. Then move it back INFO lib389:ticket47829_test.py:65 ####### INFO lib389:ticket47829_test.py:66 ############################################### INFO lib389:ticket47829_test.py:172 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:173 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO lib389:ticket47829_test.py:116 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:96 !!!!!!! cn=stage guy,cn=accounts,cn=in,dc=example,dc=com: memberof->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' INFO lib389:ticket47829_test.py:132 ######################### MODRDN cn=stage guy ###################### INFO lib389:ticket47829_test.py:116 !!!!!!! cn=indirect active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=active group,cn=accounts,cn=in,dc=example,dc=com' | |||
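The ticket47829 cases above all follow the same add/MODRDN/verify pattern against the memberof plugin. As a minimal illustrative sketch (not part of this run; the URI and credentials below are placeholders), the core of such a case can be reproduced with python-ldap:

    import ldap

    conn = ldap.initialize('ldap://localhost:38901')  # placeholder URI
    conn.simple_bind_s('cn=Directory Manager', 'password')  # placeholder credentials

    user_dn = 'cn=active guy,cn=accounts,cn=in,dc=example,dc=com'
    group_dn = 'cn=active group,cn=accounts,cn=in,dc=example,dc=com'

    # Add the active user to the active group; the memberof plugin should
    # then write a matching memberof value on the user entry.
    conn.modify_s(group_dn, [(ldap.MOD_ADD, 'member', user_dn.encode())])

    # MODRDN within the active subtree; the plugin is expected to rewrite
    # both the group's member value and the user's memberof value.
    conn.rename_s(user_dn, 'cn=xactive guy')
    entry = conn.search_s('cn=xactive guy,cn=accounts,cn=in,dc=example,dc=com',
                          ldap.SCOPE_BASE, attrlist=['memberof'])
    print(entry)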
Passed | tickets/ticket47833_test.py::test_ticket47829_init | 12.58 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47833_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 | 2.48 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47833_test.py:58 ############################################### INFO lib389:ticket47833_test.py:59 ####### INFO lib389:ticket47833_test.py:60 ####### add a Stage user to an Active group. Then move Stage user to Stage INFO lib389:ticket47833_test.py:61 ####### INFO lib389:ticket47833_test.py:62 ############################################### INFO lib389:ticket47833_test.py:145 add entry cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com INFO lib389:ticket47833_test.py:146 to group cn=active group,cn=accounts,cn=in,dc=example,dc=com INFO lib389:ticket47833_test.py:112 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' INFO lib389:ticket47833_test.py:128 ######################### MODRDN cn=xstage guy ###################### INFO lib389:ticket47833_test.py:112 !!!!!!! cn=active group,cn=accounts,cn=in,dc=example,dc=com: member ->b'cn=stage guy,cn=staged users,cn=provisioning,cn=in,dc=example,dc=com' | |||
Passed | tickets/ticket47869MMR_test.py::test_ticket47869_init | 37.22 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect f6cbcbf1-ef6a-4602-bfbb-454391de0794 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c36660a2-cccb-4e00-a96b-b2f64bd611c3 / got description=f6cbcbf1-ef6a-4602-bfbb-454391de0794) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47869MMR_test.py:51 Add cn=bind_entry, dc=example,dc=com INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect e2e9fa98-15cf-4b94-a1e3-81b1dce7516d / got description=c36660a2-cccb-4e00-a96b-b2f64bd611c3) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect dff3aaa5-3414-4409-b220-4b4a06c0d3c5 / got description=e2e9fa98-15cf-4b94-a1e3-81b1dce7516d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 3232b048-e72b-419f-b753-18ecaa9307ab / got description=dff3aaa5-3414-4409-b220-4b4a06c0d3c5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 11a6e93b-d02a-4dbe-b10b-126e05d7627d / got description=3232b048-e72b-419f-b753-18ecaa9307ab) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 0ff178ab-70e2-4c71-90f7-32f6dda74174 / got description=11a6e93b-d02a-4dbe-b10b-126e05d7627d) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect af35404e-17ca-408c-9301-3cbeb6f0e058 / got description=0ff178ab-70e2-4c71-90f7-32f6dda74174) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to 
ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 27527df4-6684-4dec-bd3f-453879dfb065 / got description=af35404e-17ca-408c-9301-3cbeb6f0e058) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 32432ef2-3e4e-4936-bd34-b254c2fd8b32 / got description=27527df4-6684-4dec-bd3f-453879dfb065) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 92d31e66-91ce-4021-8c4d-cf7f2d7a6b6b / got description=32432ef2-3e4e-4936-bd34-b254c2fd8b32) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect bce5edd7-61c6-4394-b2bb-9e3470939382 / got description=92d31e66-91ce-4021-8c4d-cf7f2d7a6b6b) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5b4a7c9f-cba8-447a-af71-ae580da0214b / got description=bce5edd7-61c6-4394-b2bb-9e3470939382) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
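The long Retry/SUCCESS sequence above is lib389's replication check loop: it writes a fresh description value on one supplier and polls the other until that value arrives. A sketch of driving the same check directly, assuming supplier1 and supplier2 are already-open lib389 DirSrv instances (an assumption, not shown here):

    from lib389._constants import DEFAULT_SUFFIX
    from lib389.replica import ReplicationManager

    repl = ReplicationManager(DEFAULT_SUFFIX)
    # Polls the second instance until the test value written on the first
    # shows up, logging the same "Retry: ... / SUCCESS: ..." pairs seen above.
    repl.test_replication(supplier1, supplier2)  # supplier1/supplier2 assumed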
Passed | tickets/ticket47869MMR_test.py::test_ticket47869_check | 3.62 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47869MMR_test.py:93 ######################### CHECK nscpentrywsi ###################### INFO lib389:ticket47869MMR_test.py:95 ##### Supplier1: Bind as cn=Directory Manager ##### INFO lib389:ticket47869MMR_test.py:98 Supplier1: Calling search_ext... INFO lib389:ticket47869MMR_test.py:102 27 results INFO lib389:ticket47869MMR_test.py:104 Results: INFO lib389:ticket47869MMR_test.py:106 dn: dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: ou=groups,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: ou=people,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: ou=services,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: uid=demo_user,ou=people,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=demo_group,ou=groups,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=group_admin,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=group_modify,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=user_admin,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=user_modify,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=user_private_read,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=replication_managers,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=bind_entry,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry0,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry1,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry2,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry3,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry4,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry5,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry6,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry7,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry8,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:106 dn: cn=test_entry9,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:110 Supplier1: count of nscpentrywsi: 27 INFO lib389:ticket47869MMR_test.py:112 ##### Supplier2: Bind as cn=Directory Manager ##### INFO lib389:ticket47869MMR_test.py:115 Supplier2: Calling search_ext... 
INFO lib389:ticket47869MMR_test.py:119 27 results INFO lib389:ticket47869MMR_test.py:121 Results: INFO lib389:ticket47869MMR_test.py:123 dn: dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: ou=groups,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: ou=people,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: ou=services,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=replication_managers,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=demo_group,ou=groups,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: uid=demo_user,ou=people,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=group_admin,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=group_modify,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=user_admin,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=user_modify,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=user_passwd_reset,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=user_private_read,ou=permissions,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63701,ou=services,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:63702,ou=services,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=bind_entry,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry0,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry1,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry2,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry3,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry4,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry5,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry6,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry7,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry8,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:123 dn: cn=test_entry9,dc=example,dc=com INFO lib389:ticket47869MMR_test.py:127 Supplier2: count of nscpentrywsi: 27 INFO lib389:ticket47869MMR_test.py:130 ##### Supplier1: Bind as cn=bind_entry, dc=example,dc=com ##### INFO lib389:ticket47869MMR_test.py:133 Supplier1: Calling search_ext... INFO lib389:ticket47869MMR_test.py:137 27 results INFO lib389:ticket47869MMR_test.py:143 Supplier1: count of nscpentrywsi: 0 INFO lib389:ticket47869MMR_test.py:146 ##### Supplier2: Bind as cn=bind_entry, dc=example,dc=com ##### INFO lib389:ticket47869MMR_test.py:149 Supplier2: Calling search_ext... INFO lib389:ticket47869MMR_test.py:153 27 results INFO lib389:ticket47869MMR_test.py:159 Supplier2: count of nscpentrywsi: 0 INFO lib389:ticket47869MMR_test.py:162 ##### Supplier1: Bind as anonymous ##### INFO lib389:ticket47869MMR_test.py:165 Supplier1: Calling search_ext... INFO lib389:ticket47869MMR_test.py:169 27 results INFO lib389:ticket47869MMR_test.py:175 Supplier1: count of nscpentrywsi: 0 INFO lib389:ticket47869MMR_test.py:178 ##### Supplier2: Bind as anonymous ##### INFO lib389:ticket47869MMR_test.py:181 Supplier2: Calling search_ext... 
INFO lib389:ticket47869MMR_test.py:185 27 results INFO lib389:ticket47869MMR_test.py:191 Supplier2: count of nscpentrywsi: 0 INFO lib389:ticket47869MMR_test.py:193 ##### ticket47869 was successfully verified. ##### | |||
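ticket47869 verifies that the computed nscpentrywsi attribute is returned to cn=Directory Manager but not to ordinary or anonymous binds. A sketch of the counting search it performs (the URI and credentials are placeholders):

    import ldap

    conn = ldap.initialize('ldap://localhost:39001')  # placeholder URI
    conn.simple_bind_s('cn=Directory Manager', 'password')  # placeholder credentials

    # Request the operational attribute explicitly; non-privileged binds
    # should get the entries back without it.
    results = conn.search_s('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                            '(objectclass=*)', ['nscpentrywsi'])
    count = sum(1 for _dn, attrs in results if 'nscpentrywsi' in attrs)
    print('count of nscpentrywsi:', count)  # 27 as Directory Manager, 0 otherwise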
Passed | tickets/ticket47871_test.py::test_ticket47871_init | 26.56 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 6e8d185c-3add-4b1e-9561-40343f50148d / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- INFO lib389:ticket47871_test.py:53 test_ticket47871_init topology_m1c1 <lib389.topologies.TopologyMain object at 0x7ff99c362220> | |||
Passed | tickets/ticket47871_test.py::test_ticket47871_1 | 1.35 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket47871_test.py:71 test_ticket47871_init: 10 entries ADDed other_entry[0..9] INFO lib389:ticket47871_test.py:78 Added entries are INFO lib389:ticket47871_test.py:80 changenumber=1,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=2,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=3,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=4,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=5,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=6,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=7,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=8,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=9,cn=changelog INFO lib389:ticket47871_test.py:80 changenumber=10,cn=changelog | |||
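The changenumber entries listed above live in the Retro Changelog backend under cn=changelog. A sketch of listing them, assuming the Retro Changelog plugin is enabled and using placeholder connection details:

    import ldap

    conn = ldap.initialize('ldap://localhost:39001')  # placeholder URI
    conn.simple_bind_s('cn=Directory Manager', 'password')  # placeholder credentials

    # Each child of cn=changelog records one write operation.
    for dn, attrs in conn.search_s('cn=changelog', ldap.SCOPE_ONELEVEL,
                                   '(changenumber=*)', ['changenumber', 'targetdn']):
        print(dn)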
Passed | tickets/ticket47900_test.py::test_ticket47900 | 10.36 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket47900_test.py:52 Creating Password Administrator entry cn=passwd_admin,dc=example,dc=com... INFO lib389:ticket47900_test.py:62 Configuring password policy... INFO lib389:ticket47900_test.py:74 Add aci to allow password admin to add/update entries... INFO lib389:ticket47900_test.py:87 Bind as the Password Administrator (before activating)... INFO lib389:ticket47900_test.py:101 Attempt to add entries with invalid passwords, these adds should fail... INFO lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (2_Short)... INFO lib389:ticket47900_test.py:111 Add failed as expected: password (2_Short) result (Constraint violation) INFO lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (No_Number)... INFO lib389:ticket47900_test.py:111 Add failed as expected: password (No_Number) result (Constraint violation) INFO lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (N0Special)... INFO lib389:ticket47900_test.py:111 Add failed as expected: password (N0Special) result (Constraint violation) INFO lib389:ticket47900_test.py:105 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)... INFO lib389:ticket47900_test.py:111 Add failed as expected: password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) result (Constraint violation) INFO lib389:ticket47900_test.py:123 Activate the Password Administrator... INFO lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (2_Short)... INFO lib389:ticket47900_test.py:142 Successfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (No_Number)... INFO lib389:ticket47900_test.py:142 Successfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password (N0Special)... INFO lib389:ticket47900_test.py:142 Successfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO lib389:ticket47900_test.py:139 Create a regular user entry cn=Joe Schmo,dc=example,dc=com with password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==)... INFO lib389:ticket47900_test.py:142 Successfully added entry (cn=Joe Schmo,dc=example,dc=com) INFO lib389:ticket47900_test.py:155 Deactivate Password Administrator and try invalid password updates...
INFO lib389:ticket47900_test.py:177 Password update failed as expected: password (2_Short) result (Constraint violation) INFO lib389:ticket47900_test.py:177 Password update failed as expected: password (No_Number) result (Constraint violation) INFO lib389:ticket47900_test.py:177 Password update failed as expected: password (N0Special) result (Constraint violation) INFO lib389:ticket47900_test.py:177 Password update failed as expected: password ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) result (Constraint violation) INFO lib389:ticket47900_test.py:188 Activate Password Administrator and try updates again... INFO lib389:ticket47900_test.py:205 Password update succeeded (2_Short) INFO lib389:ticket47900_test.py:205 Password update succeeded (No_Number) INFO lib389:ticket47900_test.py:205 Password update succeeded (N0Special) INFO lib389:ticket47900_test.py:205 Password update succeeded ({SSHA}bBy8UdtPZwu8uZna9QOYG3Pr41RpIRVDl8wddw==) | |||
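Activating and deactivating the password administrator in ticket47900 amounts to toggling the passwordAdminDN attribute on cn=config (attribute name from the 389-ds password policy configuration; URI and credentials below are placeholders):

    import ldap

    conn = ldap.initialize('ldap://localhost:38901')  # placeholder URI
    conn.simple_bind_s('cn=Directory Manager', 'password')  # placeholder credentials

    # Activate: binds as this DN now bypass password syntax checking.
    conn.modify_s('cn=config', [(ldap.MOD_REPLACE, 'passwordAdminDN',
                                 b'cn=passwd_admin,dc=example,dc=com')])
    # Deactivate: remove the setting so syntax checks apply again.
    conn.modify_s('cn=config', [(ldap.MOD_DELETE, 'passwordAdminDN', None)])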
Passed | tickets/ticket47920_test.py::test_ticket47920_init | 8.38 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47920_test.py::test_ticket47920_mod_readentry_ctrl | 1.90 | |
------------------------------Captured stdout call------------------------------ ['final description'] -------------------------------Captured log call-------------------------------- INFO lib389:ticket47920_test.py:65 ############################################### INFO lib389:ticket47920_test.py:66 ####### INFO lib389:ticket47920_test.py:67 ####### MOD: with a readentry control INFO lib389:ticket47920_test.py:68 ####### INFO lib389:ticket47920_test.py:69 ############################################### INFO lib389:ticket47920_test.py:106 Check the initial value of the entry | |||
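The ['final description'] on stdout above is the post-read entry returned by the control under test. A sketch of issuing a MOD with python-ldap's PostReadControl; the target entry DN here is hypothetical and the connection details are placeholders:

    import ldap
    from ldap.controls.readentry import PostReadControl

    conn = ldap.initialize('ldap://localhost:38901')  # placeholder URI
    conn.simple_bind_s('cn=Directory Manager', 'password')  # placeholder credentials

    prc = PostReadControl(criticality=True, attrList=['description'])
    msgid = conn.modify_ext('cn=test_entry,dc=example,dc=com',  # hypothetical DN
                            [(ldap.MOD_REPLACE, 'description', b'final description')],
                            serverctrls=[prc])
    _rtype, _rdata, _rmsgid, ctrls = conn.result3(msgid)
    # The response control carries the entry as it looks after the MOD.
    print(ctrls[0].entry['description'])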
Passed | tickets/ticket47921_test.py::test_ticket47921 | 10.38 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47921_test:ticket47921_test.py:81 Test complete | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_init | 12.98 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_one | 0.28 | |
-------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:100 test_ticket47927_one: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:111 test_ticket47927_one: Failed (expected) to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com: Constraint violation | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_two | 3.74 | |
No log output captured. | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_three | 0.28 | |
-------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:151 test_ticket47927_three: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:158 test_ticket47927_three: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_four | 0.09 | |
-------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:176 test_ticket47927_four: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:198 test_ticket47927_four: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_five | 4.18 | |
No log output captured. | |||
Passed | tickets/ticket47927_test.py::test_ticket47927_six | 2.47 | |
-------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:240 test_ticket47927_six: Failed (expected) to set the telephonenumber for cn=test_2,cn=enforced_container,dc=example,dc=com: Constraint violation CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:247 test_ticket47927_six: success to set the telephonenumber for cn=test_3,cn=excluded_container,dc=example,dc=com CRITICAL tests.tickets.ticket47927_test:ticket47927_test.py:256 test_ticket47927_six: success to set the telephonenumber for cn=test_4,cn=excluded_bis_container,dc=example,dc=com | |||
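The Constraint violation results in the ticket47927 cases come from the Attribute Uniqueness plugin scoped with enforced and excluded subtrees. A sketch of the kind of plugin configuration involved; the uniqueness-* settings are the plugin's configuration attributes, but treat the exact values here as an assumption:

    import ldap

    conn = ldap.initialize('ldap://localhost:38901')  # placeholder URI
    conn.simple_bind_s('cn=Directory Manager', 'password')  # placeholder credentials

    plugin_dn = 'cn=attribute uniqueness,cn=plugins,cn=config'
    conn.modify_s(plugin_dn, [
        (ldap.MOD_REPLACE, 'nsslapd-pluginEnabled', b'on'),
        (ldap.MOD_REPLACE, 'uniqueness-attribute-name', b'telephonenumber'),
        (ldap.MOD_REPLACE, 'uniqueness-subtrees', b'cn=enforced_container,dc=example,dc=com'),
        (ldap.MOD_REPLACE, 'uniqueness-exclude-subtrees', b'cn=excluded_container,dc=example,dc=com'),
    ])
    # Plugin configuration changes take effect after a server restart.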
Passed | tickets/ticket47953_test.py::test_ticket47953 | 12.79 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:525 Import task import_06052021_012342 for file /var/lib/dirsrv/slapd-standalone1/ldif/ticket47953.ldif completed successfully | |||
Passed | tickets/ticket47963_test.py::test_ticket47963 | 17.37 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47963_test:ticket47963_test.py:145 Test complete | |||
Passed | tickets/ticket47970_test.py::test_ticket47970 | 10.42 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_init | 12.89 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_1 | 3.59 | |
No log output captured. | |||
Passed | tickets/ticket47976_test.py::test_ticket47976_2 | 4.39 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47976_test:ticket47976_test.py:99 Test complete INFO tests.tickets.ticket47976_test:ticket47976_test.py:104 Export LDIF file... INFO lib389:tasks.py:597 Export task export_06052021_012431 for file /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif completed successfully INFO tests.tickets.ticket47976_test:ticket47976_test.py:115 Import LDIF file... INFO lib389:tasks.py:525 Import task import_06052021_012433 for file /var/lib/dirsrv/slapd-standalone1/ldif/export.ldif completed successfully | |||
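The export-then-reimport sequence in this log follows the same task pattern. A sketch under the same assumption (standalone is a connected DirSrv handle; the path is the one shown in the log):

    from lib389.tasks import Tasks
    from lib389.properties import TASK_WAIT

    tasks = Tasks(standalone)
    ldif = '/var/lib/dirsrv/slapd-standalone1/ldif/export.ldif'
    # Export the suffix to LDIF, then import the same file back,
    # waiting for each task to finish before proceeding.
    tasks.exportLDIF(suffix='dc=example,dc=com', output_file=ldif, args={TASK_WAIT: True})
    tasks.importLDIF(suffix='dc=example,dc=com', input_file=ldif, args={TASK_WAIT: True})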
Passed | tickets/ticket47976_test.py::test_ticket47976_3 | 3.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket47976_test:ticket47976_test.py:131 Testing if the delete will hang or not INFO tests.tickets.ticket47976_test:ticket47976_test.py:150 user0 was correctly deleted INFO tests.tickets.ticket47976_test:ticket47976_test.py:150 user1 was correctly deleted | |||
Passed | tickets/ticket47980_test.py::test_ticket47980 | 12.79 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket47981_test.py::test_ticket47981 | 12.74 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=o=netscaperoot INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=netscaperoot,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=netscaperoot,cn=ldbm database,cn=plugins,cn=config cn: netscaperoot nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/netscaperoot nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: o=netscaperoot objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="o=netscaperoot",cn=mapping tree,cn=config cn: o=netscaperoot nsslapd-backend: netscaperoot nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=o\3Dnetscaperoot,cn=mapping tree,cn=config cn: o=netscaperoot nsslapd-backend: netscaperoot nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
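The backend entry and the cn="o=netscaperoot",cn=mapping tree,cn=config entry dumped above are what lib389 creates when a new suffix is added. A sketch of that call, again assuming standalone is a connected instance (the backend name and suffix are taken from the log):

    from lib389.backend import Backends

    # Creating a backend also creates the matching mapping-tree entry,
    # which is why both entries appear in the captured log.
    backends = Backends(standalone)
    backends.create(properties={
        'cn': 'netscaperoot',
        'nsslapd-suffix': 'o=netscaperoot',
    })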
Passed | tickets/ticket48005_test.py::test_ticket48005_memberof | 12.88 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48005_test:ticket48005_test.py:86 Ticket 48005 memberof test... INFO lib389:tasks.py:919 fixupMemberOf task fixupmemberof_06052021_012555 for basedn dc=example,dc=com completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:110 No core files are found INFO tests.tickets.ticket48005_test:ticket48005_test.py:119 Ticket 48005 memberof test complete | |||
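The fixupMemberOf task in this log regenerates memberOf values for entries under a base DN. A sketch of launching it through lib389, under the same standalone assumption:

    from lib389.tasks import Tasks
    from lib389.properties import TASK_WAIT

    # Rebuild memberOf attributes for every entry under the suffix, as in
    # "fixupMemberOf task ... completed successfully" above.
    tasks = Tasks(standalone)
    tasks.fixupMemberOf(suffix='dc=example,dc=com', args={TASK_WAIT: True})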
Passed | tickets/ticket48005_test.py::test_ticket48005_automember | 19.98 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48005_test:ticket48005_test.py:138 Ticket 48005 automember test... INFO tests.tickets.ticket48005_test:ticket48005_test.py:143 Adding automember config INFO lib389:tasks.py:1034 Automember Rebuild Membership task (task-06052021_012608) completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:176 No core files are found INFO lib389:tasks.py:1090 Automember Export Updates task (task-06052021_012612) completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:198 No core files are found INFO lib389:tasks.py:1141 Automember Map Updates task (task-06052021_012615) completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:222 No core files are found INFO tests.tickets.ticket48005_test:ticket48005_test.py:231 Ticket 48005 automember test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_syntaxvalidate | 3.55 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48005_test:ticket48005_test.py:241 Ticket 48005 syntax validate test... INFO lib389:tasks.py:1321 Syntax Validate task (task-06052021_012623) completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:261 No core files are found INFO tests.tickets.ticket48005_test:ticket48005_test.py:265 Ticket 48005 syntax validate test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_usn | 12.83 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48005_test:ticket48005_test.py:277 Ticket 48005 usn test... INFO tests.tickets.ticket48005_test:ticket48005_test.py:285 No user entries. INFO lib389:tasks.py:1373 USN tombstone cleanup task (task-06052021_012631) completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:316 No core files are found INFO tests.tickets.ticket48005_test:ticket48005_test.py:324 Ticket 48005 usn test complete | |||
Passed | tickets/ticket48005_test.py::test_ticket48005_schemareload | 6.06 | |
------------------------------Captured stderr call------------------------------ ls: cannot access '/var/log/dirsrv/slapd-standalone1/core*': No such file or directory -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48005_test:ticket48005_test.py:334 Ticket 48005 schema reload test... INFO lib389:tasks.py:1229 Schema Reload task (task-06052021_012640) completed successfully INFO tests.tickets.ticket48005_test:ticket48005_test.py:354 No core files are found INFO tests.tickets.ticket48005_test:ticket48005_test.py:358 Ticket 48005 schema reload test complete | |||
Passed | tickets/ticket48026_test.py::test_ticket48026 | 14.91 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48026_test:ticket48026_test.py:114 Test complete | |||
Passed | tickets/ticket48109_test.py::test_ticket48109 | 38.18 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48109_test:ticket48109_test.py:32 Test case 0 INFO tests.tickets.ticket48109_test:ticket48109_test.py:77 match: conn=1 op=3 INFO tests.tickets.ticket48109_test:ticket48109_test.py:85 l1: [05/Jun/2021:01:27:24.027375193 -0400] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000265750 optime=0.000340315 etime=0.000603252 INFO tests.tickets.ticket48109_test:ticket48109_test.py:88 match: nentries=1 INFO tests.tickets.ticket48109_test:ticket48109_test.py:93 Entry uid=a* found. INFO tests.tickets.ticket48109_test:ticket48109_test.py:100 Test case 0 - OK - substr index used INFO tests.tickets.ticket48109_test:ticket48109_test.py:119 Test case 1 INFO tests.tickets.ticket48109_test:ticket48109_test.py:163 match: conn=1 op=3 INFO tests.tickets.ticket48109_test:ticket48109_test.py:171 l1: [05/Jun/2021:01:27:24.027375193 -0400] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000265750 optime=0.000340315 etime=0.000603252 INFO tests.tickets.ticket48109_test:ticket48109_test.py:174 match: nentries=1 INFO tests.tickets.ticket48109_test:ticket48109_test.py:179 Entry uid=*b found. INFO tests.tickets.ticket48109_test:ticket48109_test.py:186 Test case 1 - OK - substr index used INFO tests.tickets.ticket48109_test:ticket48109_test.py:208 Test case 2 INFO tests.tickets.ticket48109_test:ticket48109_test.py:259 match: conn=1 op=3 INFO tests.tickets.ticket48109_test:ticket48109_test.py:267 l1: [05/Jun/2021:01:27:24.027375193 -0400] conn=1 op=3 RESULT err=0 tag=101 nentries=1 wtime=0.000265750 optime=0.000340315 etime=0.000603252 INFO tests.tickets.ticket48109_test:ticket48109_test.py:270 match: nentries=1 INFO tests.tickets.ticket48109_test:ticket48109_test.py:275 Entry uid=c* found. INFO tests.tickets.ticket48109_test:ticket48109_test.py:282 Test case 2-1 - OK - correct substr index used INFO tests.tickets.ticket48109_test:ticket48109_test.py:294 match: conn=1 op=4 INFO tests.tickets.ticket48109_test:ticket48109_test.py:302 l1: [05/Jun/2021:01:27:42.341328077 -0400] conn=1 op=4 RESULT err=0 tag=101 nentries=1 wtime=0.000200099 optime=0.000284523 etime=0.000482513 INFO tests.tickets.ticket48109_test:ticket48109_test.py:305 match: nentries=1 INFO tests.tickets.ticket48109_test:ticket48109_test.py:310 Entry uid=*2 found. INFO tests.tickets.ticket48109_test:ticket48109_test.py:317 Test case 2-2 - OK - correct substr index used INFO tests.tickets.ticket48109_test:ticket48109_test.py:331 Testcase PASSED | |||
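The ticket48109 test decides whether the substring index was used by matching RESULT lines like the ones quoted above out of the access log. A self-contained sketch of that kind of parse; the regex and field names follow the log format shown here, not the test's exact code:

    import re

    RESULT_RE = re.compile(
        r'conn=(?P<conn>\d+) op=(?P<op>\d+) RESULT err=(?P<err>\d+) '
        r'tag=(?P<tag>\d+) nentries=(?P<nentries>\d+)'
    )

    line = ('[05/Jun/2021:01:27:24.027375193 -0400] conn=1 op=3 RESULT err=0 '
            'tag=101 nentries=1 wtime=0.000265750 optime=0.000340315 etime=0.000603252')

    m = RESULT_RE.search(line)
    if m and int(m.group('nentries')) == 1:
        # err=0 with exactly one entry returned is what the test treats as
        # "the indexed search found the target entry".
        print('match: nentries=%s' % m.group('nentries'))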
Passed | tickets/ticket48170_test.py::test_ticket48170 | 10.31 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48170_test:ticket48170_test.py:29 Index update correctly rejected INFO tests.tickets.ticket48170_test:ticket48170_test.py:36 Test complete | |||
Passed | tickets/ticket48194_test.py::test_init | 14.20 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Testing Ticket 48194 - harden the list of ciphers available by default INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:57 ######################### enable SSL in the directory server with all ciphers ###################### | |||
Passed | tickets/ticket48194_test.py::test_run_0 | 4.60 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 1 - Check the ciphers availability for "+all"; allowWeakCipher: on INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:131 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake successfully INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' INFO lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake successfully INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Passed | tickets/ticket48194_test.py::test_run_3 | 5.54 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 4 - Check the ciphers availability for "-all" INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:199 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect handshake to fail INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect handshake to fail INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' | |||
Passed | tickets/ticket48194_test.py::test_run_9 | 6.69 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 10 - Check no nsSSL3Ciphers (default setting) with no errorlog-level & allowWeakCipher on INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:316 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect to handshake successfully INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' INFO lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect to handshake successfully INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO lib389.utils:ticket48194_test.py:105 Found: b'New, TLSv1.3, Cipher is TLS_AES_128_GCM_SHA256\n' | |||
Passed | tickets/ticket48194_test.py::test_run_11 | 7.61 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48194_test.py:40 ############################################### INFO lib389:ticket48194_test.py:41 ####### Test Case 12 - Check nsSSL3Ciphers: +fortezza, which is not supported INFO lib389:ticket48194_test.py:42 ############################################### INFO lib389.utils:ticket48194_test.py:337 ######################### Restarting the server ###################### INFO lib389.utils:ticket48194_test.py:86 Testing DES-CBC3-SHA -- expect handshake to fail INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher DES-CBC3-SHA INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' INFO lib389.utils:ticket48194_test.py:86 Testing AES256-SHA256 -- expect handshake to fail INFO lib389.utils:ticket48194_test.py:92 Running cmdline: /usr/bin/openssl s_client -connect localhost:63601 -cipher AES256-SHA256 INFO lib389.utils:ticket48194_test.py:105 Found: b'New, (NONE), Cipher is (NONE)\n' | |||
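Each cipher check above shells out to openssl s_client and inspects the session summary. A sketch of the same probe in Python; the host/port and cipher names are taken from the log, while the helper name is ours. Note that s_client's -cipher option constrains TLS 1.2-and-below suites only, which is why a TLS 1.3 suite such as TLS_AES_128_GCM_SHA256 can still be negotiated in the "expect success" cases; TLS 1.3 suites are restricted with -ciphersuites instead.

    import subprocess

    def cipher_accepted(host_port, cipher):
        """Return True if the server completes a handshake when the client
        offers only `cipher` (TLS <= 1.2 suites)."""
        proc = subprocess.run(
            ['/usr/bin/openssl', 's_client', '-connect', host_port, '-cipher', cipher],
            input=b'', capture_output=True, timeout=30)
        # A refused handshake is reported as "Cipher is (NONE)" in the summary,
        # matching the b'New, (NONE), Cipher is (NONE)' lines in this log.
        return b'Cipher is (NONE)' not in proc.stdout

    print(cipher_accepted('localhost:63601', 'DES-CBC3-SHA'))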
Passed | tickets/ticket48212_test.py::test_ticket48212 | 23.02 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ /bin/sh: line 1: /usr/sbin/dbverify: No such file or directory /bin/sh: line 1: /usr/sbin/dbverify: No such file or directory /bin/sh: line 1: /usr/sbin/dbverify: No such file or directory /bin/sh: line 1: /usr/sbin/dbverify: No such file or directory -------------------------------Captured log call-------------------------------- INFO lib389:ticket48212_test.py:70 Bind as cn=Directory Manager INFO lib389:ticket48212_test.py:83 ######################### Import Test data (/var/lib/dirsrv/slapd-standalone1/ldif/example1k_posix.ldif) ###################### INFO lib389:tasks.py:525 Import task import_06052021_012927 for file /var/lib/dirsrv/slapd-standalone1/ldif/example1k_posix.ldif completed successfully INFO lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO lib389:ticket48212_test.py:43 dbverify passed INFO lib389:ticket48212_test.py:92 ######################### Add index by uidnumber ###################### INFO lib389:ticket48212_test.py:101 ######################### reindexing... ###################### INFO lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ INFO lib389:tasks.py:837 Index task index_attrs_06052021_012930 completed successfully INFO lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO lib389:ticket48212_test.py:43 dbverify passed INFO lib389:ticket48212_test.py:106 ######################### Add nsMatchingRule ###################### INFO lib389:ticket48212_test.py:112 ######################### reindexing... ###################### INFO lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ INFO lib389:tasks.py:837 Index task index_attrs_06052021_012933 completed successfully INFO lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO lib389:ticket48212_test.py:43 dbverify passed INFO lib389:ticket48212_test.py:117 ######################### Delete nsMatchingRule ###################### INFO lib389:ticket48212_test.py:47 +++++ reindex uidnumber +++++ INFO lib389:tasks.py:837 Index task index_attrs_06052021_012936 completed successfully INFO lib389:ticket48212_test.py:19 +++++ dbverify +++++ INFO lib389:ticket48212_test.py:23 Running /usr/sbin/dbverify -Z standalone1 -V INFO lib389:ticket48212_test.py:43 dbverify passed | |||
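The "reindex uidnumber" steps above run the server's index task after the index definition or its matching rules change. A sketch, assuming standalone is a connected DirSrv handle as before:

    from lib389.tasks import Tasks
    from lib389.properties import TASK_WAIT

    # Rebuild the uidnumber index, as in
    # "Index task index_attrs_... completed successfully" above.
    tasks = Tasks(standalone)
    tasks.reindex(suffix='dc=example,dc=com', attrname='uidnumber', args={TASK_WAIT: True})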
Passed | tickets/ticket48214_test.py::test_ticket48214_run | 10.43 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48214_test.py:83 Bind as cn=Directory Manager INFO lib389:ticket48214_test.py:86 ######################### Out of Box ###################### INFO lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ INFO lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ INFO lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif INFO lib389:ticket48214_test.py:28 Empty: INFO lib389:ticket48214_test.py:50 No nsslapd-maxbersize found in dse.ldif INFO lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'2097152' INFO lib389:ticket48214_test.py:72 Checking 2097152 vs 2097152 INFO lib389:ticket48214_test.py:89 ######################### Add nsslapd-maxbersize: 0 ###################### INFO lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ INFO lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ INFO lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif INFO lib389:ticket48214_test.py:35 Right format - nsslapd-maxbersize: 0 INFO lib389:ticket48214_test.py:52 nsslapd-maxbersize: 0 INFO lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'2097152' INFO lib389:ticket48214_test.py:72 Checking 2097152 vs 2097152 INFO lib389:ticket48214_test.py:93 ######################### Add nsslapd-maxbersize: 10000 ###################### INFO lib389:ticket48214_test.py:44 +++++ Check Max Ber Size +++++ INFO lib389:ticket48214_test.py:18 +++++ Get maxbersize from dse.ldif +++++ INFO lib389:ticket48214_test.py:21 Run CMD: egrep nsslapd-maxbersize /etc/dirsrv/slapd-standalone1/dse.ldif INFO lib389:ticket48214_test.py:35 Right format - nsslapd-maxbersize: 10000 INFO lib389:ticket48214_test.py:55 nsslapd-maxbersize: 10000 INFO lib389:ticket48214_test.py:63 ldapsearch returned nsslapd-maxbersize: b'10000' INFO lib389:ticket48214_test.py:98 ticket48214 was successfully verified. | |||
Passed | tickets/ticket48233_test.py::test_ticket48233 | 15.06 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48233_test:ticket48233_test.py:54 Test complete | |||
Passed | tickets/ticket48252_test.py::test_ticket48252_setup | 8.54 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket48265_test.py::test_ticket48265_test | 10.35 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48265_test:ticket48265_test.py:34 Adding 20 test entries... INFO tests.tickets.ticket48265_test:ticket48265_test.py:51 Search with Ticket 47521 type complex filter INFO tests.tickets.ticket48265_test:ticket48265_test.py:60 Search with Ticket 48265 type complex filter INFO tests.tickets.ticket48265_test:ticket48265_test.py:69 Test 48265 complete | |||
Passed | tickets/ticket48266_test.py::test_ticket48266_fractional | 43.41 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 27034ef5-1f61-46d7-bb65-bac4d803533e / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e373f8f3-7715-49a5-a2d5-ab5be2e5c73f / got description=27034ef5-1f61-46d7-bb65-bac4d803533e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ...
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2100cd6-e5b0-421c-bc36-2c144b000031 / got description=e373f8f3-7715-49a5-a2d5-ab5be2e5c73f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2100cd6-e5b0-421c-bc36-2c144b000031 / got description=e373f8f3-7715-49a5-a2d5-ab5be2e5c73f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2100cd6-e5b0-421c-bc36-2c144b000031 / got description=e373f8f3-7715-49a5-a2d5-ab5be2e5c73f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2100cd6-e5b0-421c-bc36-2c144b000031 / got description=e373f8f3-7715-49a5-a2d5-ab5be2e5c73f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2100cd6-e5b0-421c-bc36-2c144b000031 / got description=e373f8f3-7715-49a5-a2d5-ab5be2e5c73f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b2100cd6-e5b0-421c-bc36-2c144b000031 / got description=e373f8f3-7715-49a5-a2d5-ab5be2e5c73f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working | |||
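The Retry/SUCCESS pairs above come from lib389's replication checker, which writes a marker value on one side and polls the other until it arrives. A sketch of invoking it, assuming supplier1 and supplier2 are connected DirSrv instances matching the two servers in the log:

    from lib389.replica import ReplicationManager
    from lib389._constants import DEFAULT_SUFFIX

    # test_replication() writes a generated description value through supplier1
    # and retries reads on supplier2 until it shows up, producing the
    # "(expect <uuid> / got description=...)" lines seen above.
    repl = ReplicationManager(DEFAULT_SUFFIX)
    repl.test_replication(supplier1, supplier2)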
Passed | tickets/ticket48266_test.py::test_ticket48266_check_repl_desc | 1.14 | |
No log output captured. | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_init | 9.19 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48270_test:ticket48270_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_homeDirectory_indexed_cis | 2.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48270_test:ticket48270_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO tests.tickets.ticket48270_test:ticket48270_test.py:57 successfully checked that with an exact matching rule, a lowercase equality filter fails INFO lib389:tasks.py:837 Index task index_attrs_06052021_013135 completed successfully INFO tests.tickets.ticket48270_test:ticket48270_test.py:63 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_homeDirectory_mixed_value | 0.09 | |
No log output captured. | |||
Passed | tickets/ticket48270_test.py::test_ticket48270_extensible_search | 1.54 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48270_test:ticket48270_test.py:91 Default: can retrieve an entry filter syntax with exact stored value INFO tests.tickets.ticket48270_test:ticket48270_test.py:93 Default: can retrieve an entry filter caseExactIA5Match with exact stored value INFO tests.tickets.ticket48270_test:ticket48270_test.py:97 Default: can not retrieve an entry filter syntax match with lowered stored value INFO tests.tickets.ticket48270_test:ticket48270_test.py:103 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value INFO tests.tickets.ticket48270_test:ticket48270_test.py:110 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value | |||
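The "filter caseExactIA5Match" and "filter caseIgnoreIA5Match" checks above use RFC 4515 extensible-match filters, which name the matching rule explicitly in the filter string. A sketch with python-ldap; the connection details, credentials, and homeDirectory values are illustrative:

    import ldap

    conn = ldap.initialize('ldap://localhost:38901')
    conn.simple_bind_s('cn=Directory Manager', 'password')  # illustrative credentials

    base = 'dc=example,dc=com'
    # Exact-case match: succeeds only when the stored value's case matches.
    conn.search_s(base, ldap.SCOPE_SUBTREE,
                  '(homeDirectory:caseExactIA5Match:=/home/TestUser)')
    # Case-insensitive match: also finds values stored with different case.
    conn.search_s(base, ldap.SCOPE_SUBTREE,
                  '(homeDirectory:caseIgnoreIA5Match:=/home/testuser)')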
Passed | tickets/ticket48272_test.py::test_ticket48272 | 19.86 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48272_test:ticket48272_test.py:129 Test PASSED | |||
Passed | tickets/ticket48294_test.py::test_48294_init | 8.26 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48294_test.py:31 ############################################### INFO lib389:ticket48294_test.py:32 ####### Testing Ticket 48294 - Linked Attributes plug-in - won't update links after MODRDN operation INFO lib389:ticket48294_test.py:33 ############################################### | |||
Passed | tickets/ticket48294_test.py::test_48294_run_0 | 0.13 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48294_test.py:31 ############################################### INFO lib389:ticket48294_test.py:32 ####### Case 0 - Rename employee1 and adjust the link type value by replace INFO lib389:ticket48294_test.py:33 ############################################### INFO lib389:ticket48294_test.py:59 ######################### MODRDN uid=employee2 ###################### | |||
Passed | tickets/ticket48294_test.py::test_48294_run_1 | 0.26 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48294_test.py:31 ############################################### INFO lib389:ticket48294_test.py:32 ####### Case 1 - Rename employee2 and adjust the link type value by delete and add INFO lib389:ticket48294_test.py:33 ############################################### INFO lib389:ticket48294_test.py:59 ######################### MODRDN uid=employee3 ###################### | |||
Passed | tickets/ticket48294_test.py::test_48294_run_2 | 1.94 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48294_test.py:31 ############################################### INFO lib389:ticket48294_test.py:32 ####### Case 2 - Rename manager1 to manager2 and make sure the managed attribute value is updated INFO lib389:ticket48294_test.py:33 ############################################### INFO lib389:ticket48294_test.py:59 ######################### MODRDN uid=manager2 ###################### | |||
Passed | tickets/ticket48295_test.py::test_48295_init | 8.43 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48295_test.py:30 ############################################### INFO lib389:ticket48295_test.py:31 ####### Testing Ticket 48295 - Entry cache is not rolled back -- Linked Attributes plug-in - wrong behaviour when adding valid and broken links INFO lib389:ticket48295_test.py:32 ############################################### | |||
Passed | tickets/ticket48295_test.py::test_48295_run | 1.90 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48295_test.py:30 ############################################### INFO lib389:ticket48295_test.py:31 ####### Add 2 linktypes to manager1 - one exists, another does not - to make sure the managed entry does not have the managed type. INFO lib389:ticket48295_test.py:32 ############################################### | |||
Passed | tickets/ticket48312_test.py::test_ticket48312 | 10.32 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48312_test:ticket48312_test.py:117 Test complete | |||
Passed | tickets/ticket48354_test.py::test_ticket48354 | 10.54 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48354_test:ticket48354_test.py:50 Test PASSED | |||
Passed | tickets/ticket48362_test.py::test_ticket48362 | 118.72 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect b6c39db2-af09-4c3d-b745-6d9ebe74c282 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect e9b4be23-2741-49b8-8bb5-ef00e26adeaf / got description=b6c39db2-af09-4c3d-b745-6d9ebe74c282) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry...ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 INFO tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry... INFO tests.tickets.ticket48362_test:ticket48362_test.py:48 Enable the DNA plugin... INFO tests.tickets.ticket48362_test:ticket48362_test.py:55 Restarting the server...
INFO tests.tickets.ticket48362_test:ticket48362_test.py:28 Add dna plugin config entry...ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO tests.tickets.ticket48362_test:ticket48362_test.py:48 Enable the DNA plugin... INFO tests.tickets.ticket48362_test:ticket48362_test.py:55 Restarting the server... INFO tests.tickets.ticket48362_test:ticket48362_test.py:83 ======================== Update dnaPortNum=39001 ============================ INFO tests.tickets.ticket48362_test:ticket48362_test.py:90 ======================== Update done INFO tests.tickets.ticket48362_test:ticket48362_test.py:83 ======================== Update dnaPortNum=39002 ============================ INFO tests.tickets.ticket48362_test:ticket48362_test.py:90 ======================== Update done INFO tests.tickets.ticket48362_test:ticket48362_test.py:132 ======================== BEFORE RESTART ============================ INFO tests.tickets.ticket48362_test:ticket48362_test.py:135 ======================== BEFORE RESTART ============================ INFO tests.tickets.ticket48362_test:ticket48362_test.py:141 ======================== BEFORE RESTART ============================ INFO tests.tickets.ticket48362_test:ticket48362_test.py:150 =================== AFTER RESTART ================================= INFO tests.tickets.ticket48362_test:ticket48362_test.py:153 =================== AFTER RESTART ================================= INFO tests.tickets.ticket48362_test:ticket48362_test.py:159 =================== AFTER RESTART ================================= INFO tests.tickets.ticket48362_test:ticket48362_test.py:162 Test complete | |||
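The ticket48362 run configures the DNA plugin on both suppliers before exercising range updates (the dnaPortNum lines above). A sketch of adding a DNA config entry over python-ldap; the attribute names are the plugin's standard ones, but the values and the use of uidNumber as the managed type are illustrative, not this test's exact configuration (which manages dnaPortNum):

    import ldap
    import ldap.modlist

    conn = ldap.initialize('ldap://localhost:39001')
    conn.simple_bind_s('cn=Directory Manager', 'password')  # illustrative credentials

    DNA_PLUGIN_DN = 'cn=Distributed Numeric Assignment Plugin,cn=plugins,cn=config'
    config = {
        'objectClass': [b'top', b'extensibleObject'],
        'cn': [b'dna config'],
        'dnaType': [b'uidNumber'],                     # attribute the plugin assigns
        'dnaFilter': [b'(objectclass=posixAccount)'],  # which entries get a value
        'dnaScope': [b'dc=example,dc=com'],
        'dnaNextValue': [b'1000'],
        'dnaMaxValue': [b'2000'],
    }
    conn.add_s('cn=dna config,%s' % DNA_PLUGIN_DN, ldap.modlist.addModlist(config))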
Passed | tickets/ticket48366_test.py::test_ticket48366_init | 9.23 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48366_test.py:44 Add subtree: ou=green,dc=example,dc=com INFO lib389:ticket48366_test.py:48 Add subtree: ou=red,dc=example,dc=com INFO lib389:ticket48366_test.py:54 Add cn=test,ou=people,dc=example,dc=com INFO lib389:ticket48366_test.py:60 Add cn=proxy,ou=people,dc=example,dc=com INFO lib389.utils:ticket48366_test.py:90 Adding %d test entries... | |||
Passed | tickets/ticket48366_test.py::test_ticket48366_search_user | 0.14 | |
No log output captured. | |||
Passed | tickets/ticket48366_test.py::test_ticket48366_search_dm | 1.28 | |
No log output captured. | |||
Passed | tickets/ticket48370_test.py::test_ticket48370 | 10.47 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48370_test:ticket48370_test.py:187 Test PASSED | |||
Passed | tickets/ticket48383_test.py::test_ticket48383 | 56.10 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ ldiffile: /var/lib/dirsrv/slapd-standalone1/ldif/standalone1.ldif -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket48383_test:ticket48383_test.py:62 Failed to change nsslapd-cachememsize: No such object INFO tests.tickets.ticket48383_test:ticket48383_test.py:88 Test complete | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_init | 8.94 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48497_test:ticket48497_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_homeDirectory_mixed_value | 0.07 | |
No log output captured. | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_extensible_search | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48497_test:ticket48497_test.py:49 Default: can retrieve an entry filter syntax with exact stored value INFO tests.tickets.ticket48497_test:ticket48497_test.py:51 Default: can retrieve an entry filter caseExactIA5Match with exact stored value INFO tests.tickets.ticket48497_test:ticket48497_test.py:55 Default: can not retrieve an entry filter syntax match with lowered stored value INFO tests.tickets.ticket48497_test:ticket48497_test.py:61 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value INFO tests.tickets.ticket48497_test:ticket48497_test.py:68 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_cfg | 0.08 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48497_test:ticket48497_test.py:73 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match | |||
Passed | tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_run | 3.99 | |
-------------------------------Captured log call-------------------------------- INFO lib389:tasks.py:837 Index task index_attrs_06052021_013909 completed successfully INFO tests.tickets.ticket48497_test:ticket48497_test.py:93 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48665_test.py::test_ticket48665 | 10.36 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48665_test:ticket48665_test.py:40 8 entries are returned from the server. CRITICAL tests.tickets.ticket48665_test:ticket48665_test.py:47 Failed to change nsslapd-cachememsize: No such object INFO tests.tickets.ticket48665_test:ticket48665_test.py:52 8 entries are returned from the server. INFO tests.tickets.ticket48665_test:ticket48665_test.py:63 8 entries are returned from the server. INFO tests.tickets.ticket48665_test:ticket48665_test.py:65 Test complete | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_init | 8.85 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48745_test:ticket48745_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_homeDirectory_indexed_cis | 2.09 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48745_test:ticket48745_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO tests.tickets.ticket48745_test:ticket48745_test.py:57 successfully checked that with an exact matching rule, a lowercase equality filter fails INFO lib389:tasks.py:837 Index task index_attrs_06052021_013948 completed successfully INFO tests.tickets.ticket48745_test:ticket48745_test.py:63 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_homeDirectory_mixed_value | 0.08 | |
No log output captured. | |||
Passed | tickets/ticket48745_test.py::test_ticket48745_extensible_search_after_index | 1.83 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48745_test:ticket48745_test.py:91 Default: can retrieve an entry filter syntax with exact stored value INFO tests.tickets.ticket48745_test:ticket48745_test.py:99 Default: can retrieve an entry filter caseExactIA5Match with exact stored value INFO tests.tickets.ticket48745_test:ticket48745_test.py:106 Default: can not retrieve an entry filter syntax match with lowered stored value INFO tests.tickets.ticket48745_test:ticket48745_test.py:112 Default: can not retrieve an entry filter caseExactIA5Match with lowered stored value INFO tests.tickets.ticket48745_test:ticket48745_test.py:119 Default: can retrieve an entry filter caseIgnoreIA5Match with lowered stored value | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_init | 8.81 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48746_test:ticket48746_test.py:26 Initialization: add dummy entries for the tests | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_cis | 2.10 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48746_test:ticket48746_test.py:39 index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO tests.tickets.ticket48746_test:ticket48746_test.py:57 successfully checked that with an exact matching rule, a lowercase equality filter fails INFO lib389:tasks.py:837 Index task index_attrs_06052021_014000 completed successfully INFO tests.tickets.ticket48746_test:ticket48746_test.py:63 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_homeDirectory_mixed_value | 0.07 | |
No log output captured. | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_extensible_search_after_index | 0.07 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48746_test:ticket48746_test.py:99 Default: can retrieve an entry filter caseExactIA5Match with exact stored value | |||
Passed | tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_ces | 4.23 | |
-------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48746_test:ticket48746_test.py:104 index homeDirectory in caseExactIA5Match; this would trigger the crash INFO tests.tickets.ticket48746_test:ticket48746_test.py:121 successfully checked that with an exact matching rule, a lowercase equality filter fails INFO lib389:tasks.py:837 Index task index_attrs_06052021_014003 completed successfully INFO tests.tickets.ticket48746_test:ticket48746_test.py:127 Check indexing succeeded with a specified matching rule | |||
Passed | tickets/ticket48799_test.py::test_ticket48799 | 43.34 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:169 Joining consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 462aefb6-e879-4dbc-ab44-4a7ef85254d3 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 INFO lib389.topologies:topologies.py:174 Ensuring consumer consumer1 from supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 already exists -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48799_test:ticket48799_test.py:80 Test complete | |||
Passed | tickets/ticket48844_test.py::test_ticket48844_init | 9.76 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:backend.py:80 List backend with suffix=dc=bitwise,dc=com INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=TestBitw,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=TestBitw,cn=ldbm database,cn=plugins,cn=config cn: TestBitw nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/TestBitw nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=bitwise,dc=com objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="dc=bitwise,dc=com",cn=mapping tree,cn=config cn: dc=bitwise,dc=com nsslapd-backend: TestBitw nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=dc\3Dbitwise\2Cdc\3Dcom,cn=mapping tree,cn=config cn: dc=bitwise,dc=com nsslapd-backend: TestBitw nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree | |||
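The setup log shows lib389 creating the TestBitw backend plus its mapping-tree entry. With the newer lib389 DS-objects API the same step looks roughly like this (a sketch, not the exact backend.py calls logged above; inst stands for a bound DirSrv instance):

from lib389.backend import Backends

def create_bitwise_backend(inst):
    # "inst" is an assumed lib389 DirSrv handle bound as cn=Directory Manager.
    # Backends.create() adds both the ldbm backend entry and the matching
    # mapping-tree entry shown in the captured log.
    return Backends(inst).create(properties={
        'cn': 'TestBitw',
        'nsslapd-suffix': 'dc=bitwise,dc=com',
    })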
Passed | tickets/ticket48844_test.py::test_ticket48844_bitwise_on | 3.15 | |
No log output captured. | |||
Passed | tickets/ticket48844_test.py::test_ticket48844_bitwise_off | 7.06 | |
No log output captured. | |||
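The bitwise_on/bitwise_off pair toggles the Bitwise plugin and re-runs filters using the two matching rules it registers by OID: 1.2.840.113556.1.4.803 (bit AND) and 1.2.840.113556.1.4.804 (bit OR). A python-ldap sketch; the integer attribute name and connection details are assumptions:

import ldap

conn = ldap.initialize("ldap://localhost:38901")
conn.simple_bind_s("cn=Directory Manager", "password")  # hypothetical bind

# Entries whose flag attribute has BOTH bit 2 and bit 512 set (2|512 = 514).
res_and = conn.search_s("dc=bitwise,dc=com", ldap.SCOPE_SUBTREE,
                        "(testUserAccountControl:1.2.840.113556.1.4.803:=514)")

# Entries with AT LEAST ONE of those bits set.
res_or = conn.search_s("dc=bitwise,dc=com", ldap.SCOPE_SUBTREE,
                       "(testUserAccountControl:1.2.840.113556.1.4.804:=514)")
conn.unbind_s()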
Passed | tickets/ticket48891_test.py::test_ticket48891_setup | 10.37 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48891_test.py:43 Bind as cn=Directory Manager INFO lib389:ticket48891_test.py:52 ######################### SETUP SUFFIX o=ticket48891.org ###################### INFO lib389:backend.py:80 List backend with suffix=dc=ticket48891.org INFO lib389:backend.py:290 Creating a local backend INFO lib389:backend.py:76 List backend cn=ticket48891,cn=ldbm database,cn=plugins,cn=config INFO lib389:__init__.py:1710 Found entry dn: cn=ticket48891,cn=ldbm database,cn=plugins,cn=config cn: ticket48891 nsslapd-cachememsize: 512000 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone1/db/ticket48891 nsslapd-dncachememsize: 16777216 nsslapd-readonly: off nsslapd-require-index: off nsslapd-require-internalop-index: off nsslapd-suffix: dc=ticket48891.org objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO lib389:mappingTree.py:153 Entry dn: cn="dc=ticket48891.org",cn=mapping tree,cn=config cn: dc=ticket48891.org nsslapd-backend: ticket48891 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO lib389:__init__.py:1710 Found entry dn: cn=dc\3Dticket48891.org,cn=mapping tree,cn=config cn: dc=ticket48891.org nsslapd-backend: ticket48891 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO lib389:ticket48891_test.py:61 ######################### Generate Test data ###################### INFO lib389:ticket48891_test.py:77 ######################### SEARCH ALL ###################### INFO lib389:ticket48891_test.py:78 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci INFO lib389:ticket48891_test.py:82 Returned 10 entries. INFO lib389:ticket48891_test.py:86 10 person entries are successfully created under dc=ticket48891.org. | |||
Passed | tickets/ticket48893_test.py::test_ticket48893 | 10.38 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket48893_test:ticket48893_test.py:46 Test PASSED | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_setup | 8.29 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO lib389:ticket48906_test.py:63 Bind as cn=Directory Manager INFO lib389:ticket48906_test.py:83 ######################### SEARCH ALL ###################### INFO lib389:ticket48906_test.py:84 Bind as cn=Directory Manager and add the READ/SEARCH SELFDN aci INFO lib389:ticket48906_test.py:88 Returned 10 entries. INFO lib389:ticket48906_test.py:92 10 person entries are successfully created under dc=example,dc=com. | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_dblock_default | 0.32 | |
-------------------------------Captured log call-------------------------------- INFO lib389:ticket48906_test.py:149 ################################### INFO lib389:ticket48906_test.py:150 ### INFO lib389:ticket48906_test.py:151 ### Check that before any change config/monitor INFO lib389:ticket48906_test.py:152 ### contains the default value INFO lib389:ticket48906_test.py:153 ### INFO lib389:ticket48906_test.py:154 ################################### | |||
Passed | tickets/ticket48906_test.py::test_ticket48906_dblock_ldap_update | 2.99 | |
------------------------------Captured stdout call------------------------------ line locks:10000 expected_value 10000 value 10000 -------------------------------Captured log call-------------------------------- INFO lib389:ticket48906_test.py:160 ################################### INFO lib389:ticket48906_test.py:161 ### INFO lib389:ticket48906_test.py:162 ### Check that after ldap update INFO lib389:ticket48906_test.py:163 ### - monitor contains DEFAULT INFO lib389:ticket48906_test.py:164 ### - configured contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:165 ### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:166 ### - After stop guardian contains DEFAULT INFO lib389:ticket48906_test.py:167 ### In fact guardian should differ from config to recreate the env INFO lib389:ticket48906_test.py:168 ### Check that after restart (DBenv recreated) INFO lib389:ticket48906_test.py:169 ### - monitor contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:170 ### - configured contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:171 ### - dse.ldif contains DBLOCK_LDAP_UPDATE INFO lib389:ticket48906_test.py:172 ### INFO lib389:ticket48906_test.py:173 ################################### | |||
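The dblock tests contrast three places the lock count can live: the ldbm config entry, the read-only monitor (which reflects the running DB environment), and dse.ldif/guardian on disk. A sketch of the LDAP-visible part, using the classic ldbm DN layout; the credentials and the monitor attribute name are assumptions:

import ldap

DBLOCK_CONFIG = "cn=config,cn=ldbm database,cn=plugins,cn=config"
DBLOCK_MONITOR = "cn=database,cn=monitor,cn=ldbm database,cn=plugins,cn=config"

conn = ldap.initialize("ldap://localhost:38901")
conn.simple_bind_s("cn=Directory Manager", "password")  # hypothetical bind
conn.modify_s(DBLOCK_CONFIG,
              [(ldap.MOD_REPLACE, "nsslapd-db-locks", b"20000")])
# The monitor keeps reporting the value the running DB environment was
# created with; the new setting only appears there after a restart.
print(conn.search_s(DBLOCK_MONITOR, ldap.SCOPE_BASE,
                    attrlist=["nsslapd-db-configured-locks"]))
conn.unbind_s()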
Passed | tickets/ticket49008_test.py::test_ticket49008 | 96.73 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 99ddf7dc-c5cb-4bf1-8bd3-99603ca914ee / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect c22e3c22-faec-4b07-b6f7-b4a3b096f57a / got description=99ddf7dc-c5cb-4bf1-8bd3-99603ca914ee) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 7d9f9778-9e75-4fa8-b5f1-bc64ab875cd0 / got description=c22e3c22-faec-4b07-b6f7-b4a3b096f57a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 30eb3615-fb59-417a-a02f-c331887a445a / got description=7d9f9778-9e75-4fa8-b5f1-bc64ab875cd0) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created -------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49008_test:ticket49008_test.py:115 ruv before fail: b'{replica 2 ldap://localhost.localdomain:39002} 60bb0feb000100020000 60bb1018000300020000' INFO tests.tickets.ticket49008_test:ticket49008_test.py:116 ruv after fail: b'{replica 2 ldap://localhost.localdomain:39002} 60bb0feb000100020000 60bb1018000300020000' | |||
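The "ruv before fail / ruv after fail" comparison reads the replica update vector back from the replication tombstone entry. A python-ldap sketch; the port and password are assumptions:

import ldap

RUV_FILTER = ("(&(nsuniqueid=ffffffff-ffffffff-ffffffff-ffffffff)"
              "(objectclass=nstombstone))")

conn = ldap.initialize("ldap://localhost:39002")
conn.simple_bind_s("cn=Directory Manager", "password")  # hypothetical bind
res = conn.search_s("dc=example,dc=com", ldap.SCOPE_SUBTREE,
                    RUV_FILTER, ["nsds50ruv"])
for _dn, attrs in res:
    for element in attrs.get("nsds50ruv", []):
        # e.g. b'{replica 2 ldap://...:39002} <min csn> <max csn>'
        print(element)
conn.unbind_s()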
Passed | tickets/ticket49020_test.py::test_ticket49020 | 81.04 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 81b29f1b-4853-4247-a26b-670c3157f9ef / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect cb543ac1-f7ee-4f1f-a32b-7e947d856047 / got description=81b29f1b-4853-4247-a26b-670c3157f9ef) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 83bfcc56-ecbc-4087-9bfb-7c47f35f3b30 / got description=cb543ac1-f7ee-4f1f-a32b-7e947d856047) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 13904d6c-537d-4a4e-87d8-049625460881 / got description=83bfcc56-ecbc-4087-9bfb-7c47f35f3b30) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created -------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1168 Starting total init cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config | |||
Passed | tickets/ticket49076_test.py::test_ticket49076 | 19.82 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49095_test.py::test_ticket49095 | 10.28 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49095_test:ticket49095_test.py:79 Test Passed | |||
Passed | tickets/ticket49122_test.py::test_ticket49122 | 24.79 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: nsrole=cn=empty,dc=example,dc=com INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (nsrole=cn=empty,dc=example,dc=com) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(nsrole=cn=empty,dc=example,dc=com)) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (!(nsrole=cn=empty,dc=example,dc=com)) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(sn=app*))(userpassword=*)) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(nsrole=cn=empty,dc=example,dc=com))(userpassword=*)) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(nsrole=cn=empty,dc=example,dc=com)(sn=app*))(userpassword=*)) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(objectclass=person)(sn=app*))(nsrole=cn=empty,dc=example,dc=com)) INFO tests.tickets.ticket49122_test:ticket49122_test.py:57 Testing filter: (&(|(&(cn=*)(objectclass=person)(nsrole=cn=empty,dc=example,dc=com)))(uid=*)) INFO lib389:ticket49122_test.py:86 Test Passed | |||
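nsrole is computed on the fly by the Roles plugin, so it can never be satisfied from an index; ticket49122 verifies the server still evaluates it correctly inside AND/OR/NOT compounds. One of the logged filters as a python-ldap sketch (connection details are assumptions):

import ldap

conn = ldap.initialize("ldap://localhost:38901")
conn.simple_bind_s("cn=Directory Manager", "password")  # hypothetical bind
res = conn.search_s(
    "dc=example,dc=com", ldap.SCOPE_SUBTREE,
    "(&(|(objectclass=person)(nsrole=cn=empty,dc=example,dc=com))"
    "(userpassword=*))")
conn.unbind_s()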
Passed | tickets/ticket49180_test.py::test_ticket49180 | 111.95 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier4 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39004, 'ldap-secureport': 63704, 'server-id': 'supplier4', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 5de40f2d-1017-4308-9c84-d6e5b17f09f1 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 0071d9ca-da3d-4adf-a475-117bdb7effd9 / got description=5de40f2d-1017-4308-9c84-d6e5b17f09f1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 6de472ed-19ad-4e54-9908-61e5fc0ec4f1 / got description=0071d9ca-da3d-4adf-a475-117bdb7effd9) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect ccc064d5-2aaa-44f2-b488-416166c15f6f / got description=6de472ed-19ad-4e54-9908-61e5fc0ec4f1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:156 Joining supplier supplier4 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 94c61ce0-c446-4e70-bde8-9af1df554575 / got description=ccc064d5-2aaa-44f2-b488-416166c15f6f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect a260a76c-26d5-4d92-baa4-58834dd983dc / got description=94c61ce0-c446-4e70-bde8-9af1df554575) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier4 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier4 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier4 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49180_test:ticket49180_test.py:78 Running test_ticket49180... INFO tests.tickets.ticket49180_test:ticket49180_test.py:80 Check that replication works properly on all suppliers INFO tests.tickets.ticket49180_test:ticket49180_test.py:95 test_clean: disable supplier 4... INFO tests.tickets.ticket49180_test:ticket49180_test.py:30 test_clean: remove all the agreements to supplier 4... INFO lib389:agreement.py:1094 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO lib389:agreement.py:1094 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO lib389:agreement.py:1094 Agreement (cn=004,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) was successfully removed INFO tests.tickets.ticket49180_test:ticket49180_test.py:46 Restoring supplier 4... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is NOT working (expect 1be57965-a06a-4fcb-b86f-3fa04e31b57e / got description=a260a76c-26d5-4d92-baa4-58834dd983dc) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 1c86caf5-261e-443c-897e-2be8ec1a133c / got description=1be57965-a06a-4fcb-b86f-3fa04e31b57e) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39004 already exists INFO tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m2. INFO tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m3. INFO tests.tickets.ticket49180_test:ticket49180_test.py:59 Replication is working m1 -> m4. INFO tests.tickets.ticket49180_test:ticket49180_test.py:67 Replication is working m4 -> m1. INFO tests.tickets.ticket49180_test:ticket49180_test.py:73 Supplier 4 has been successfully restored. INFO tests.tickets.ticket49180_test:ticket49180_test.py:106 Errors found on m1: 0 INFO tests.tickets.ticket49180_test:ticket49180_test.py:111 Errors found on m2: 0 INFO tests.tickets.ticket49180_test:ticket49180_test.py:116 Errors found on m3: 0 | |||
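The disable/restore cycle for supplier 4 maps onto a handful of ReplicationManager calls; a sketch whose method names follow the replica.py log lines above (m1 and m4 stand for lib389 DirSrv instances from the four-supplier fixture, an assumption here):

from lib389._constants import DEFAULT_SUFFIX
from lib389.replica import ReplicationManager

def restore_supplier(m1, m4):
    # m1/m4 are assumed lib389 DirSrv handles. join_supplier() bootstraps
    # the re-added supplier and creates both one-way agreements; the
    # ensure_agreement() calls are idempotent ("already exists" above).
    repl = ReplicationManager(DEFAULT_SUFFIX)
    repl.join_supplier(m1, m4)
    repl.ensure_agreement(m4, m1)
    repl.ensure_agreement(m1, m4)
    repl.test_replication(m1, m4)  # the "Replication ... is working" probe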
Passed | tickets/ticket49184_test.py::test_ticket49184 | 14.94 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49184_test:ticket49184_test.py:89 create users and group... INFO tests.tickets.ticket49184_test:ticket49184_test.py:39 Adding members to the group... INFO tests.tickets.ticket49184_test:ticket49184_test.py:39 Adding members to the group... | |||
Passed | tickets/ticket49227_test.py::test_ticket49227 | 35.63 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49249_test.py::test_ticket49249 | 10.24 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49273_test.py::test_49273_corrupt_dbversion | 14.34 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. | |||
Passed | tickets/ticket49290_test.py::test_49290_range_unindexed_notes | 14.52 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. ------------------------------Captured stderr call------------------------------ [05/Jun/2021:01:56:27.766734723 -0400] - INFO - ldbm_instance_config_cachememsize_set - force a minimal value 512000 [05/Jun/2021:01:56:27.777077226 -0400] - INFO - bdb_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database [05/Jun/2021:01:56:27.780318181 -0400] - INFO - check_and_set_import_cache - pagesize: 4096, available bytes 7456743424, process usage 27049984 [05/Jun/2021:01:56:27.783226503 -0400] - INFO - check_and_set_import_cache - Import allocates 2912790KB import cache. [05/Jun/2021:01:56:27.931931793 -0400] - INFO - bdb_db2index - userRoot: Indexing attribute: modifytimestamp [05/Jun/2021:01:56:27.935041131 -0400] - ERR - libdb - BDB1566 txn_checkpoint interface requires an environment configured for the transaction subsystem [05/Jun/2021:01:56:27.938576661 -0400] - ERR - bdb_force_checkpoint - Checkpoint FAILED, error Unexpected dbimpl error code (22) [05/Jun/2021:01:56:27.946281190 -0400] - INFO - bdb_db2index - userRoot: Finished indexing. [05/Jun/2021:01:56:27.969189422 -0400] - INFO - bdb_pre_close - All database threads now stopped | |||
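The stderr above is an offline reindex (bdb_db2index) of modifytimestamp; the checkpoint ERR lines are emitted mid-pass and the test only requires that indexing still finishes. A sketch of driving such a pass from lib389, assuming the instance handle and that the db2index helper accepts these arguments:

def offline_reindex(inst):
    # "inst" is an assumed lib389 DirSrv handle; the db2index argument
    # names are an assumption about the helper's signature.
    inst.stop()
    inst.db2index(bename="userRoot", attrs=["modifytimestamp"])
    inst.start()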
Passed | tickets/ticket49386_test.py::test_ticket49386 | 42.60 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_0,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_1,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_2,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_3,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_4,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_5,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_6,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_7,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_8,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49386_test:ticket49386_test.py:28 Adding user (cn=user_9,ou=people,dc=example,dc=com): INFO lib389:ticket49386_test.py:65 !!!!!!! cn=user_1,ou=people,dc=example,dc=com: memberof->b'cn=group_1,ou=groups,dc=example,dc=com' INFO lib389:ticket49386_test.py:66 !!!!!!! b'cn=group_1,ou=groups,dc=example,dc=com' INFO lib389:ticket49386_test.py:67 !!!!!!! cn=group_1,ou=groups,dc=example,dc=com INFO tests.tickets.ticket49386_test:ticket49386_test.py:130 memberof log found: [05/Jun/2021:01:57:36.950244547 -0400] - DEBUG - memberof-plugin - memberof_postop_modrdn: Skip modrdn operation because src/dst identical cn=group_1,ou=groups,dc=example,dc=com | |||
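The DEBUG line the test greps for comes from the MemberOf plugin skipping fix-up when a modrdn's source and destination DNs are identical. Enabling the plugin for such a scenario is short in lib389 (a sketch; inst is an assumed DirSrv handle):

from lib389.plugins import MemberOfPlugin

def enable_memberof(inst):
    # Plugin enable/disable flags are read at startup, hence the restart.
    MemberOfPlugin(inst).enable()
    inst.restart()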
Passed | tickets/ticket49441_test.py::test_ticket49441 | 17.92 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49441_test:ticket49441_test.py:35 Position ldif files, and add indexes... INFO tests.tickets.ticket49441_test:ticket49441_test.py:52 Import LDIF with large indexed binary attributes... ERROR lib389:tasks.py:522 Error: import task import_06052021_015815 for file /var/lib/dirsrv/slapd-standalone1/ldifbinary.ldif exited with -23 INFO tests.tickets.ticket49441_test:ticket49441_test.py:61 Verify server is still running... INFO tests.tickets.ticket49441_test:ticket49441_test.py:68 Test PASSED | |||
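The import in ticket49441 goes through the tasks interface: adding an entry under cn=import,cn=tasks,cn=config starts an online ldif2db run. A python-ldap sketch; the credentials are assumptions, while the LDIF path is the one from this run's log:

import ldap
import ldap.modlist

conn = ldap.initialize("ldap://localhost:38901")
conn.simple_bind_s("cn=Directory Manager", "password")  # hypothetical bind

task_dn = "cn=import_example,cn=import,cn=tasks,cn=config"
task = {
    "objectClass": [b"top", b"extensibleObject"],
    "cn": [b"import_example"],
    "nsFilename": [b"/var/lib/dirsrv/slapd-standalone1/ldifbinary.ldif"],
    "nsInstance": [b"userRoot"],
}
conn.add_s(task_dn, ldap.modlist.addModlist(task))
# The test then polls nsTaskExitCode on task_dn: a non-zero exit (-23 in
# the log) is tolerated as long as the server itself keeps running.
conn.unbind_s()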
Passed | tickets/ticket49460_test.py::test_ticket_49460 | 61.11 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect 8f68eeb0-b617-4db3-8537-5a87c70162c1 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 4d0a7912-14cd-4304-b2dc-959fe7e4362a / got description=8f68eeb0-b617-4db3-8537-5a87c70162c1) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ... 
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect feab94f1-f97f-4a08-a895-ac4d3a15ed64 / got description=4d0a7912-14cd-4304-b2dc-959fe7e4362a) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 942e814a-ed4a-46f8-b9c6-604501d8c47c / got description=feab94f1-f97f-4a08-a895-ac4d3a15ed64) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... 
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user11,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user21,ou=people,dc=example,dc=com): CRITICAL tests.tickets.ticket49460_test:ticket49460_test.py:26 Adding user (cn=user31,ou=people,dc=example,dc=com): | |||
Passed | tickets/ticket49471_test.py::test_ticket49471 | 13.40 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket49471_test:ticket49471_test.py:28 Adding user (cn=user_1,ou=people,dc=example,dc=com): | |||
Passed | tickets/ticket49540_test.py::test_ticket49540 | 27.87 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49540_test:ticket49540_test.py:28 index homeDirectory INFO lib389:tasks.py:837 Index task index_attrs_06052021_020410 completed successfully INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 0th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> NO STATUS INFO tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great, it was expected in the middle of indexing INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 1st loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> NO STATUS INFO tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great, it was expected in the middle of indexing INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 2nd loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> NO STATUS INFO tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great, it was expected in the middle of indexing INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 3rd loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> NO STATUS INFO tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great, it was expected in the middle of indexing INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 4th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:71 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> NO STATUS INFO tests.tickets.ticket49540_test:ticket49540_test.py:83 =========> Great, it was expected in the middle of indexing INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 5th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 6th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 7th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 8th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' INFO tests.tickets.ticket49540_test:ticket49540_test.py:62 check_task_status =========> 9th loop INFO tests.tickets.ticket49540_test:ticket49540_test.py:68 cn=index_attrs_06052021_020410,cn=index,cn=tasks,cn=config ---> b'userRoot: Finished indexing.' | |||
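The log above shows the polling pattern behind check_task_status: the task entry under cn=tasks,cn=config is read repeatedly, reports NO STATUS while the index build is still running, and eventually exposes b'userRoot: Finished indexing.'. A minimal sketch of that pattern, assuming the status is read from the nsTaskStatus attribute (the loop body is illustrative, not the test's actual code):

    # Illustrative sketch: poll the index task entry until it reports a status.
    import time
    import ldap

    def check_task_status(instance, task_dn, attempts=10, delay=1):
        for i in range(attempts):
            entry = instance.getEntry(task_dn, ldap.SCOPE_BASE,
                                      '(objectclass=*)', ['nsTaskStatus'])
            status = entry.getValue('nsTaskStatus')  # None until DS writes it
            if status:
                print('%s ---> %s' % (task_dn, status))
            else:
                print('%s ---> NO STATUS' % task_dn)
            time.sleep(delay)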
Passed | tickets/ticket49623_2_test.py::test_modrdn_loop | 10.44 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. -------------------------------Captured log call-------------------------------- INFO tests.tickets.ticket49623_2_test:ticket49623_2_test.py:66 Check the log messages for cenotaph error | |||
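The single call-phase message above corresponds to a scan of the supplier's error log after the MODRDN loop. A minimal sketch of that check, assuming lib389's ds_error_log helper and an illustrative regex:

    # Illustrative sketch: fail the test if cenotaph errors reached the error log.
    def assert_no_cenotaph_errors(supplier):
        matches = supplier.ds_error_log.match('.*cenotaph.*')  # matching log lines
        assert not matches, 'Unexpected cenotaph errors: %s' % matches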
Passed | tickets/ticket49658_test.py::test_ticket49658_init | 48.41 | |
-------------------------------Captured log setup------------------------------- INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier2 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39002, 'ldap-secureport': 63702, 'server-id': 'supplier2', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.SetupDs:setup.py:658 Starting installation... INFO lib389.SetupDs:setup.py:686 Completed installation for supplier3 INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39003, 'ldap-secureport': 63703, 'server-id': 'supplier3', 'suffix': 'dc=example,dc=com'} was created. INFO lib389.topologies:topologies.py:142 Creating replication topology. INFO lib389.topologies:topologies.py:156 Joining supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is NOT working (expect fba9e825-290f-4fb8-aba3-2077310e30d5 / got description=None) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 8c8887b1-1ab6-4611-95cc-4018bdb027d6 / got description=fba9e825-290f-4fb8-aba3-2077310e30d5) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 INFO lib389.topologies:topologies.py:156 Joining supplier supplier3 to supplier1 ...
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 completed INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 was created INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is NOT working (expect 297935c1-af4a-488c-8e72-cb6e4dec146f / got description=8c8887b1-1ab6-4611-95cc-4018bdb027d6) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 is working INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7b385db2-350e-42da-a5a3-2bda1a8bcc19 / got description=297935c1-af4a-488c-8e72-cb6e4dec146f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7b385db2-350e-42da-a5a3-2bda1a8bcc19 / got description=297935c1-af4a-488c-8e72-cb6e4dec146f) INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is NOT working (expect 7b385db2-350e-42da-a5a3-2bda1a8bcc19 / got description=297935c1-af4a-488c-8e72-cb6e4dec146f) INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 is working INFO lib389.replica:replica.py:2153 SUCCESS: joined supplier from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier2 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier1 to supplier3 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier2 to supplier3 ...
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 was created INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier1 ... INFO lib389.replica:replica.py:2338 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 already exists INFO lib389.topologies:topologies.py:164 Ensuring supplier supplier3 to supplier2 ... INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39003 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39002 was created -------------------------------Captured log call-------------------------------- CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=0,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=0,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=1,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=1,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=2,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=2,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=3,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=3,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=4,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=4,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=5,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=5,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=6,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=6,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=7,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=7,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=8,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=8,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=9,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding
employeeNumber=9,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=10,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=10,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=11,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=11,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=12,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=12,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=13,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=13,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=14,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=14,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=15,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=15,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=16,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=16,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=17,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=17,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=18,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=18,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:54 Adding user (employeeNumber=19,ou=distinguished,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:151 Adding employeeNumber=19,ou=distinguished,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_0,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_0,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_1,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_1,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_2,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_2,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_3,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_3,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_4,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_4,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_5,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_5,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_6,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_6,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_7,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_7,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_8,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_8,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_9,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_9,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_10,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_10,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_11,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_11,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_12,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_12,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_13,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_13,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_14,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_14,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_15,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_15,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_16,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_16,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_17,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_17,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_18,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_18,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_19,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_19,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_20,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_20,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_21,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_21,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_22,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_22,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_23,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_23,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_24,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_24,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_25,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_25,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_26,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_26,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_27,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_27,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_28,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_28,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_29,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_29,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_30,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_30,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_31,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_31,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_32,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_32,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_33,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_33,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_34,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_34,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_35,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_35,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_36,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_36,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_37,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_37,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_38,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_38,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_39,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_39,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_40,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_40,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_41,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_41,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_42,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_42,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_43,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_43,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_44,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_44,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_45,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_45,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_46,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_46,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_47,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_47,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_48,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_48,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_49,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_49,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_50,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_50,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_51,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_51,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_52,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_52,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_53,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_53,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_54,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_54,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_55,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_55,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_56,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_56,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_57,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_57,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_58,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_58,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_59,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_59,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_60,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_60,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_61,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_61,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_62,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_62,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_63,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_63,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_64,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_64,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_65,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_65,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_66,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_66,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_67,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_67,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_68,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_68,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_69,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_69,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_70,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_70,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_71,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_71,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_72,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_72,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_73,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_73,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_74,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_74,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_75,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_75,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_76,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_76,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_77,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_77,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_78,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_78,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_79,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_79,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_80,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_80,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_81,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_81,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_82,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_82,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_83,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_83,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_84,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_84,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_85,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_85,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_86,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding 
uid=user_86,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_87,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_87,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_88,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_88,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_89,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_89,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_90,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_90,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_91,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_91,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_92,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_92,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_93,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_93,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_94,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_94,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_95,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_95,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_96,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_96,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_97,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_97,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_98,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_98,ou=regular,ou=people,dc=example,dc=com on M3 CRITICAL tests.tickets.ticket49658_test:ticket49658_test.py:39 Adding user (uid=user_99,ou=regular,ou=people,dc=example,dc=com): INFO tests.tickets.ticket49658_test:ticket49658_test.py:159 Adding uid=user_99,ou=regular,ou=people,dc=example,dc=com on M3 | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_0 | 33.65 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:289 Search M1 employeeNumber=b'0' (vs. b'0') INFO tests.tickets.ticket49658_test:ticket49658_test.py:295 Search M2 employeeNumber=b'0' (vs. b'0') INFO tests.tickets.ticket49658_test:ticket49658_test.py:303 Search M3 employeeNumber=b'0' (vs. b'0') | |||
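Each of the test_ticket49658_* cases follows the pause/update/resume pattern visible above: agreements are paused so a supplier can be modified in isolation, then resumed so the change replicates out. A minimal sketch of that pattern with lib389's agreement helper; the DN list and update callback are illustrative, not the test's actual code:

    # Illustrative sketch: pause agreements, apply updates, then resume them.
    def with_paused_agreements(supplier, agmt_dns, apply_updates):
        for dn in agmt_dns:
            supplier.agreement.pause(dn)       # logged as "Pausing replication ..."
        try:
            apply_updates()                    # modify entries while isolated
        finally:
            for dn in agmt_dns:
                supplier.agreement.resume(dn)  # logged as "Resuming replication ..."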
Passed | tickets/ticket49658_test.py::test_ticket49658_1 | 33.62 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:395 Search M1 employeeNumber=b'1' (vs. b'1') INFO tests.tickets.ticket49658_test:ticket49658_test.py:401 Search M2 employeeNumber=b'1' (vs. b'1') INFO tests.tickets.ticket49658_test:ticket49658_test.py:409 Search M3 employeeNumber=b'1' (vs. b'1') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_2 | 33.64 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:501 Search M1 employeeNumber=b'2' (vs. b'2') INFO tests.tickets.ticket49658_test:ticket49658_test.py:507 Search M2 employeeNumber=b'2' (vs. b'2') INFO tests.tickets.ticket49658_test:ticket49658_test.py:515 Search M3 employeeNumber=b'2' (vs. b'2') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_3 | 33.61 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:607 Search M1 employeeNumber=b'3' (vs. b'3') INFO tests.tickets.ticket49658_test:ticket49658_test.py:613 Search M2 employeeNumber=b'3' (vs. b'3') INFO tests.tickets.ticket49658_test:ticket49658_test.py:621 Search M3 employeeNumber=b'3' (vs. b'3') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_4 | 33.62 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:713 Search M1 employeeNumber=b'4' (vs. b'4') INFO tests.tickets.ticket49658_test:ticket49658_test.py:719 Search M2 employeeNumber=b'4' (vs. b'4') INFO tests.tickets.ticket49658_test:ticket49658_test.py:727 Search M3 employeeNumber=b'4' (vs. b'4') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_5 | 33.62 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:819 Search M1 employeeNumber=b'5' (vs. b'5') INFO tests.tickets.ticket49658_test:ticket49658_test.py:825 Search M2 employeeNumber=b'5' (vs. b'5') INFO tests.tickets.ticket49658_test:ticket49658_test.py:833 Search M3 employeeNumber=b'5' (vs. b'5') | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_6 | 33.57 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:934 Search M1 employeeNumber=b'6.2' (vs. 6.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:940 Search M2 employeeNumber=b'6.2' (vs. 6.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:948 Search M3 employeeNumber=b'6.2' (vs. 6.2) | |||
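The closing "Search M1/M2/M3" messages are the convergence check: the same entry is read on every supplier and its employeeNumber is compared with the expected value (attribute values come back as bytes, hence the b'...' in the log). A minimal sketch of that check; the function name and DN handling are assumptions:

    # Illustrative sketch: verify employeeNumber converged on all suppliers.
    import ldap

    def check_converged(suppliers, dn, expected):
        for name, inst in sorted(suppliers.items()):
            entry = inst.getEntry(dn, ldap.SCOPE_BASE,
                                  '(objectclass=*)', ['employeeNumber'])
            value = entry.getValue('employeeNumber')
            print('Search %s employeeNumber=%s (vs. %s)' % (name, value, expected))
            assert value == expected.encode()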
Passed | tickets/ticket49658_test.py::test_ticket49658_7 | 33.63 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:1042 Search M1 employeeNumber=b'7.2' (vs. 7.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1048 Search M2 employeeNumber=b'7.2' (vs. 7.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1056 Search M3 employeeNumber=b'7.2' (vs. 7.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_8 | 33.61 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:1150 Search M1 employeeNumber=b'8.2' (vs. 8.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1156 Search M2 employeeNumber=b'8.2' (vs. 8.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1164 Search M3 employeeNumber=b'8.2' (vs. 8.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_9 | 33.62 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:1259 Search M1 employeeNumber=b'9.2' (vs. 9.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1265 Search M2 employeeNumber=b'9.2' (vs. 9.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1273 Search M3 employeeNumber=b'9.2' (vs. 9.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_10 | 33.63 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:1369 Search M1 employeeNumber=b'10.2' (vs. 10.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1375 Search M2 employeeNumber=b'10.2' (vs. 10.2) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1383 Search M3 employeeNumber=b'10.2' (vs. 10.2) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_11 | 33.61 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:1479 Search M1 employeeNumber=b'11.1' (vs. 11.1) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1485 Search M2 employeeNumber=b'11.1' (vs. 11.1) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1493 Search M3 employeeNumber=b'11.1' (vs. 11.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_12 | 33.63 | |
-------------------------------Captured log call-------------------------------- INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO tests.tickets.ticket49658_test:ticket49658_test.py:1594 Search M1 employeeNumber=b'12.1' (vs. 12.1) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1600 Search M2 employeeNumber=b'12.1' (vs. 12.1) INFO tests.tickets.ticket49658_test:ticket49658_test.py:1608 Search M3 employeeNumber=b'12.1' (vs. 12.1) | |||
Passed | tickets/ticket49658_test.py::test_ticket49658_13 | 33.55 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1709 Search M1 employeeNumber=b'13.1' (vs. 13.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1715 Search M2 employeeNumber=b'13.1' (vs. 13.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1723 Search M3 employeeNumber=b'13.1' (vs. 13.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_14 | 33.63 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1825 Search M1 employeeNumber=b'14.1' (vs. 14.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1831 Search M2 employeeNumber=b'14.1' (vs. 14.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1839 Search M3 employeeNumber=b'14.1' (vs. 14.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_15 | 33.54 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1940 Search M1 employeeNumber=b'15.1' (vs. 15.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1946 Search M2 employeeNumber=b'15.1' (vs. 15.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:1954 Search M3 employeeNumber=b'15.1' (vs. 15.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_16 | 37.66 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2076 Search M1 employeeNumber=b'1.1' (vs. 1.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2082 Search M2 employeeNumber=b'1.1' (vs. 1.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2090 Search M3 employeeNumber=b'1.1' (vs. 1.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_17 | 37.67 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2183 Search M1 employeeNumber=b'2.2' (vs. 2.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2189 Search M2 employeeNumber=b'2.2' (vs. 2.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2197 Search M3 employeeNumber=b'2.2' (vs. 2.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_18 | 37.67 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2289 Search M1 employeeNumber=b'3.2' (vs. 3.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2295 Search M2 employeeNumber=b'3.2' (vs. 3.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2303 Search M3 employeeNumber=b'3.2' (vs. 3.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_19 | 38.66 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2405 Search M1 employeeNumber=b'4.1' (vs. 4.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2411 Search M2 employeeNumber=b'4.1' (vs. 4.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2419 Search M3 employeeNumber=b'4.1' (vs. 4.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_20 | 38.66 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2520 Search M1 employeeNumber=b'5.1' (vs. 5.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2526 Search M2 employeeNumber=b'5.1' (vs. 5.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2534 Search M3 employeeNumber=b'5.1' (vs. 5.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_21 | 38.66 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2635 Search M1 employeeNumber=b'6.1' (vs. 6.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2641 Search M2 employeeNumber=b'6.1' (vs. 6.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2649 Search M3 employeeNumber=b'6.1' (vs. 6.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_22 | 38.65 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2750 Search M1 employeeNumber=b'7.1' (vs. 7.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2756 Search M2 employeeNumber=b'7.1' (vs. 7.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2764 Search M3 employeeNumber=b'7.1' (vs. 7.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_23 | 39.68 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2873 Search M1 employeeNumber=b'8.2' (vs. 8.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2879 Search M2 employeeNumber=b'8.2' (vs. 8.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2887 Search M3 employeeNumber=b'8.2' (vs. 8.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_24 | 39.70 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:2996 Search M1 employeeNumber=b'9.2' (vs. 9.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3002 Search M2 employeeNumber=b'9.2' (vs. 9.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3010 Search M3 employeeNumber=b'9.2' (vs. 9.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_25 | 39.67 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3119 Search M1 employeeNumber=b'10.2' (vs. 10.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3125 Search M2 employeeNumber=b'10.2' (vs. 10.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3133 Search M3 employeeNumber=b'10.2' (vs. 10.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_26 | 39.68 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3242 Search M1 employeeNumber=b'11.2' (vs. 11.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3248 Search M2 employeeNumber=b'11.2' (vs. 11.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3256 Search M3 employeeNumber=b'11.2' (vs. 11.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_27 | 39.66 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3365 Search M1 employeeNumber=b'12.2' (vs. 12.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3371 Search M2 employeeNumber=b'12.2' (vs. 12.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3379 Search M3 employeeNumber=b'12.2' (vs. 12.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_28 | 39.67 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3488 Search M1 employeeNumber=b'13.2' (vs. 13.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3494 Search M2 employeeNumber=b'13.2' (vs. 13.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3502 Search M3 employeeNumber=b'13.2' (vs. 13.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_29 | 39.68 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3612 Search M1 employeeNumber=b'14.2' (vs. 14.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3618 Search M2 employeeNumber=b'14.2' (vs. 14.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3626 Search M3 employeeNumber=b'14.2' (vs. 14.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_30 | 39.68 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3735 Search M1 employeeNumber=b'15.2' (vs. 15.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3741 Search M2 employeeNumber=b'15.2' (vs. 15.2)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3749 Search M3 employeeNumber=b'15.2' (vs. 15.2)
Passed | tickets/ticket49658_test.py::test_ticket49658_31 | 40.73 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3866 Search M1 employeeNumber=b'16.1' (vs. 16.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3872 Search M2 employeeNumber=b'16.1' (vs. 16.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3880 Search M3 employeeNumber=b'16.1' (vs. 16.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_32 | 40.70 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:3998 Search M1 employeeNumber=b'17.1' (vs. 17.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4004 Search M2 employeeNumber=b'17.1' (vs. 17.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4012 Search M3 employeeNumber=b'17.1' (vs. 17.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_33 | 40.66 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4122 Search M1 employeeNumber=b'18.1' (vs. 18.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4128 Search M2 employeeNumber=b'18.1' (vs. 18.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4136 Search M3 employeeNumber=b'18.1' (vs. 18.1)
Passed | tickets/ticket49658_test.py::test_ticket49658_34 | 44.67 | |
-------------------------------Captured log call--------------------------------
INFO lib389:agreement.py:1193 Pausing replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1193 Pausing replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=002,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=001,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO lib389:agreement.py:1219 Resuming replication cn=003,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4246 Search M1 employeeNumber=b'19.1' (vs. 19.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4252 Search M2 employeeNumber=b'19.1' (vs. 19.1)
INFO tests.tickets.ticket49658_test:ticket49658_test.py:4260 Search M3 employeeNumber=b'19.1' (vs. 19.1)
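The ticket49658 entries above all repeat one lib389 pattern: pause every replication agreement on the three suppliers, apply the modify under test, resume the agreements, and then check that all three suppliers converge on the same single-valued employeeNumber. A minimal sketch of that pattern, assuming a three-supplier topology and the legacy lib389 agreement API that emits the "Pausing replication"/"Resuming replication" messages; the helper names and user DN are illustrative, not taken from the test itself:

import ldap
from lib389._constants import DEFAULT_SUFFIX

def pause_all_agreements(inst):
    # Each pause emits "Pausing replication cn=00X,cn=replica,..." as above.
    for agmt in inst.agreement.list(suffix=DEFAULT_SUFFIX):
        inst.agreement.pause(agmt.dn)

def resume_all_agreements(inst):
    # Emits the matching "Resuming replication ..." messages.
    for agmt in inst.agreement.list(suffix=DEFAULT_SUFFIX):
        inst.agreement.resume(agmt.dn)

def check_employee_number(inst, user_dn, expected):
    # python-ldap returns attribute values as bytes, which is why the log
    # prints employeeNumber=b'10.2' compared against the plain string 10.2.
    entry = inst.search_s(user_dn, ldap.SCOPE_BASE,
                          '(objectClass=*)', ['employeeNumber'])
    assert entry[0][1]['employeeNumber'][0] == expected.encode()

Each supplier holds agreements to the other two plus cn=003 appearing twice across the set, which is why every log cell pairs four "Pausing" entries with four "Resuming" entries before the three Search M1/M2/M3 checks.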
Passed | tickets/ticket49788_test.py::test_ticket49781 | 10.36 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
Passed | tickets/ticket50078_test.py::test_ticket50078 | 48.41 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for supplier1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39001, 'ldap-secureport': 63701, 'server-id': 'supplier1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for hub1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39101, 'ldap-secureport': 63801, 'server-id': 'hub1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for consumer1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 39201, 'ldap-secureport': 63901, 'server-id': 'consumer1', 'suffix': 'dc=example,dc=com'} was created.
INFO lib389.topologies:topologies.py:524 Creating replication topology.
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 was created
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is NOT working (expect 1a498348-d33c-4c77-924c-665828d72691 / got description=None)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 is working
INFO lib389.replica:replica.py:2211 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101
INFO lib389.replica:replica.py:2084 SUCCESS: bootstrap to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 completed
INFO lib389.replica:replica.py:2365 SUCCESS: Agreement from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 was created
INFO lib389.replica:replica.py:2268 SUCCESS: joined consumer from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39101 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201
INFO lib389.replica:replica.py:2498 Retry: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is NOT working (expect 13d150bf-2d67-4b66-84b5-35778e8673cd / got description=1a498348-d33c-4c77-924c-665828d72691)
INFO lib389.replica:replica.py:2496 SUCCESS: Replication from ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39001 to ldap://ci-vm-10-0-139-64.hosted.upshift.rdu2.redhat.com:39201 is working
-------------------------------Captured log call--------------------------------
INFO tests.tickets.ticket50078_test:ticket50078_test.py:31 Replication is working.
INFO tests.tickets.ticket50078_test:ticket50078_test.py:47 Rename the test entry test_user1...
INFO tests.tickets.ticket50078_test:ticket50078_test.py:52 Replication is working.
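The setup log for ticket50078 shows the standard lib389 cascading bootstrap: install supplier1, hub1, and consumer1, wire agreements hop by hop, and poll each hop with a marker value until replication works (the "Retry: ... is NOT working ... got description=None" line is simply the first poll arriving before the marker). A rough equivalent using the public lib389 helpers, assuming the post-rename ReplicaRole.SUPPLIER constant; the fixture internals may differ:

from lib389._constants import DEFAULT_SUFFIX, ReplicaRole
from lib389.replica import ReplicationManager
from lib389.topologies import create_topology

# One supplier, one hub, one consumer, as in the setup log above.
topo = create_topology({ReplicaRole.SUPPLIER: 1,
                        ReplicaRole.HUB: 1,
                        ReplicaRole.CONSUMER: 1})

# test_replication() writes a marker on the source and polls the target,
# producing the Retry/SUCCESS pairs seen in the log.
repl = ReplicationManager(DEFAULT_SUFFIX)
repl.test_replication(topo.ms["supplier1"], topo.cs["consumer1"])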
Passed | tickets/ticket50234_test.py::test_ticket50234 | 10.30 | |
-------------------------------Captured log setup-------------------------------
INFO lib389.SetupDs:setup.py:658 Starting installation...
INFO lib389.SetupDs:setup.py:686 Completed installation for standalone1
INFO lib389.topologies:topologies.py:109 Instance with parameters {'ldap-port': 38901, 'ldap-secureport': 63601, 'server-id': 'standalone1', 'suffix': 'dc=example,dc=com'} was created.
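Standalone fixtures such as those behind ticket49781 and ticket50234 reduce to the one-role case of the same helper. A sketch, assuming the default port assignments shown in the setup logs:

from lib389._constants import ReplicaRole
from lib389.topologies import create_topology

# Installs and starts a single 'standalone1' instance; the logged
# parameters (ldap-port 38901, ldap-secureport 63601) are lib389's
# defaults for the first standalone instance.
topo = create_topology({ReplicaRole.STANDALONE: 1})
inst = topo.standalone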